def __init__(self, operation, username, count_inputs, count_results, burst, no_of_op_in_group):
    super(OperationOverlayDetails, self).__init__()

    self.operation_id = operation.id
    self.metadata['operation_id'] = {"name": "Entity id", "disabled": "True"}

    self.operation_status = operation.status
    self.metadata['operation_status'] = {"name": "Status", "disabled": "True"}

    self.start_date = date2string(operation.start_date)
    self.metadata['start_date'] = {"name": "Start date", "disabled": "True"}

    self.end_date = "None" if operation.completion_date is None else date2string(operation.completion_date)
    self.metadata['end_date'] = {"name": "End date", "disabled": "True"}

    self.result_datatypes = count_results
    self.metadata['result_datatypes'] = {"name": "No. of resulted DataTypes", "disabled": "True"}

    self.input_datatypes = count_inputs
    self.metadata['input_datatypes'] = {"name": "No. of input DataTypes", "disabled": "True"}

    ### Now set/update generic fields
    self.gid = operation.gid
    self.author = username
    self.count = no_of_op_in_group
    self.burst_name = burst.name if burst is not None else ""
    self.operation_type = self.compute_operation_name(operation.algorithm.algo_group.group_category.displayname,
                                                      operation.algorithm.algo_group.displayname,
                                                      operation.algorithm.name)

    group_id = operation.operation_group.id if operation.operation_group is not None else None
    self.operation_group_id = group_id

    self.operation_label = (operation.operation_group.name if operation.operation_group is not None
                            else operation.user_group)
    self.metadata[self.CODE_OPERATION_TAG]["disabled"] = 'True'

    group_value = operation.operation_group.name if operation.operation_group is not None else None
    self.operation_group_name = group_value
    self.metadata[self.CODE_OPERATION_GROUP_NAME]["disabled"] = 'True'

def store(self, burst_config, scalars_only=False, store_references=True):
    self.name.store(burst_config.name)
    self.status.store(burst_config.status)
    self.error_message.store(burst_config.error_message or 'None')
    self.start_time.store(date2string(burst_config.start_time))
    self.finish_time.store(date2string(burst_config.finish_time))
    self.simulator.store(uuid.UUID(burst_config.simulator_gid))
    self.range1.store(burst_config.range1)
    self.range2.store(burst_config.range2)

def test_date2string(self):
    """
    Check the date2string method for various inputs.
    """
    date_input = datetime.datetime(1999, 3, 16, 18, 20, 33, 100000)
    assert date2string(date_input, complex_format=False) == '03-16-1999', \
        "Did not get expected string from datetime conversion object."
    custom_format = "%Y"
    assert date2string(date_input, date_format=custom_format) == '1999', \
        "Did not get expected string from datetime conversion object."
    assert date2string(date_input, complex_format=True) == '1999-03-16,18-20-33.100000', \
        "Did not get expected string from datetime conversion object."
    assert "None" == date2string(None), "Expected to return 'None' for None input."

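# The test above pins down date2string's contract exactly (a unittest-style
# variant of the same test appears further below). As a reading aid, here is a
# minimal sketch of an implementation consistent with those assertions; the real
# tvb.core.utils version and its format-constant names are assumptions, not the
# actual source.
COMPLEX_TIME_FORMAT = '%Y-%m-%d,%H-%M-%S.%f'  # assumed; matches '1999-03-16,18-20-33.100000'
SIMPLE_DATE_FORMAT = '%m-%d-%Y'               # assumed; matches '03-16-1999'


def date2string(date_input, complex_format=True, date_format=None):
    """Render a datetime as a string; return the literal 'None' for a missing date."""
    if date_input is None:
        return "None"
    if date_format is not None:
        return date_input.strftime(date_format)
    if complex_format:
        return date_input.strftime(COMPLEX_TIME_FORMAT)
    return date_input.strftime(SIMPLE_DATE_FORMAT)
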
def __init__(self, operation, username, count_inputs, count_results, burst, no_of_op_in_group, op_pid):
    super(OperationOverlayDetails, self).__init__()

    self.operation_id = operation.id
    self.metadata['operation_id'] = {"name": "Entity id", "disabled": "True"}

    self.operation_status = operation.status
    self.metadata['operation_status'] = {"name": "Status", "disabled": "True"}

    self.start_date = date2string(operation.start_date)
    self.metadata['start_date'] = {"name": "Start date", "disabled": "True"}

    self.end_date = "None" if operation.completion_date is None else date2string(operation.completion_date)
    self.metadata['end_date'] = {"name": "End date", "disabled": "True"}

    self.result_datatypes = count_results
    self.metadata['result_datatypes'] = {"name": "No. of resulted DataTypes", "disabled": "True"}

    self.input_datatypes = count_inputs
    self.metadata['input_datatypes'] = {"name": "No. of input DataTypes", "disabled": "True"}

    # If the operation was executed in another process or on a cluster, show the pid/job_id
    if op_pid is not None:
        if op_pid.pid is not None:
            self.pid = op_pid.pid
            self.metadata['pid'] = {"name": "Process pid", "disabled": "True"}
        elif op_pid.job_id is not None:
            self.job_id = op_pid.job_id
            self.metadata['job_id'] = {"name": "Cluster job Id", "disabled": "True"}

    ### Now set/update generic fields
    self.gid = operation.gid
    self.author = username
    self.count = no_of_op_in_group
    self.burst_name = burst.name if burst is not None else ""
    self.operation_type = self.compute_operation_name(operation.algorithm.algorithm_category.displayname,
                                                      operation.algorithm.displayname)

    group_id = operation.operation_group.id if operation.operation_group is not None else None
    self.operation_group_id = group_id

    self.operation_label = (operation.operation_group.name if operation.operation_group is not None
                            else operation.user_group)
    self.metadata[self.CODE_OPERATION_TAG]["disabled"] = 'True'

    group_value = operation.operation_group.name if operation.operation_group is not None else None
    self.operation_group_name = group_value
    self.metadata[self.CODE_OPERATION_GROUP_NAME]["disabled"] = 'True'

def store_view_model(view_model, base_dir):
    # type: (ViewModel, str) -> str
    """
    Completely store any ViewModel object to the directory specified by base_dir.
    It works recursively because there are view models that are serialized in
    multiple files (e.g. SimulatorAdapterModel).
    """
    h5_path = path_for(base_dir, ViewModelH5, view_model.gid, type(view_model).__name__)
    with ViewModelH5(h5_path, view_model) as h5_file:
        h5_file.store(view_model)
        h5_file.type.store(get_full_class_name(type(view_model)))
        h5_file.create_date.store(date2string(datetime.now()))
        if hasattr(view_model, "generic_attributes"):
            h5_file.store_generic_attributes(view_model.generic_attributes)
        else:
            # For HasTraits not inheriting from ViewModel (e.g. Linear)
            h5_file.store_generic_attributes(GenericAttributes())

        references = h5_file.gather_references()
        for trait_attr, gid in references:
            if not gid:
                continue
            model_attr = getattr(view_model, trait_attr.field_name)
            if isinstance(gid, list):
                for idx, sub_gid in enumerate(gid):
                    store_view_model(model_attr[idx], base_dir)
            else:
                store_view_model(model_attr, base_dir)
    return h5_path

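# Hedged usage sketch for store_view_model: MyViewModel and the target directory
# are hypothetical stand-ins. The point is the recursion -- every referenced
# sub-model is written to its own ViewModelH5 file under the same base_dir, and
# the returned path names the root file.
view_model = MyViewModel()
root_h5 = store_view_model(view_model, "/tmp/vm_exports")
# root_h5 is the .h5 file for view_model itself; referenced sub-models were
# stored recursively as sibling files in /tmp/vm_exports.
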
def _serialize_value(value):
    """
    Take a value which will be stored as meta-data and apply some transformation
    if necessary.

    :param value: value which is planned to be stored
    :returns: value to be stored

    NOTE: this method was part of the TVB 1.0 hdf5storage manager, but since this
    script needs to be independent of the current storage manager, we duplicate it here.
    """
    if value is None:
        return ''
    # Force unicode strings to simple strings.
    if isinstance(value, unicode):
        return str(value)
    # Transform boolean to string and prefix it
    elif isinstance(value, bool):
        return BOOL_VALUE_PREFIX + str(value)
    # Transform date to string and prepend the prefix
    elif isinstance(value, datetime):
        return DATETIME_VALUE_PREFIX + date2string(value, date_format=DATE_TIME_FORMAT)
    else:
        return value

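# Small demonstration of the prefixing scheme above. The three constants are
# assumed placeholder values (the real TVB prefixes and date format may differ);
# only the shape of the output mirrors the branches of _serialize_value.
BOOL_VALUE_PREFIX = 'bool:'           # assumed placeholder
DATETIME_VALUE_PREFIX = 'datetime:'   # assumed placeholder
DATE_TIME_FORMAT = '%Y-%m-%d %H:%M:%S'

print(_serialize_value(None))                    # ''
print(_serialize_value(True))                    # 'bool:True'
print(_serialize_value(datetime(2024, 1, 1)))    # 'datetime:2024-01-01 00:00:00'
print(_serialize_value(42))                      # 42 -- stored unchanged
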
def _populate_values(data_list, type_, category_key, complex_dt_attributes=None):
    """
    Populate meta-data fields for data_list (list of DataTypes).

    Private method, to be called recursively. It will receive a list of Attributes,
    and it will populate the 'options' entry with data references from DB.
    """
    values = []
    all_field_values = []
    for id_, _, entity_gid, subject, completion_date, group, gr_name, tag1 in data_list:
        # Here we only populate with DB data, actual
        # XML check will be done after select and submit.
        actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
        display_name = ''
        if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
            display_name = actual_entity[0].display_name
        display_name += ' - ' + (subject or "None ")
        if group:
            display_name += ' - From: ' + str(group)
        else:
            display_name += utils.date2string(completion_date)
        if gr_name:
            display_name += ' - ' + str(gr_name)
        display_name += ' - ID:' + str(id_)
        all_field_values.append(str(entity_gid))
        values.append({KEY_NAME: display_name, KEY_VALUE: entity_gid})
        if complex_dt_attributes is not None:
            ### TODO apply filter on sub-attributes
            # This is the copy of complex dtype attributes on all db options
            values[-1][KEY_ATTRIBUTES] = complex_dt_attributes
    if category_key is not None:
        category = dao.get_category_by_id(category_key)
        if not category.display and not category.rawinput and len(data_list) > 1:
            values.insert(0, {KEY_NAME: "All", KEY_VALUE: ','.join(all_field_values)})
    return values

def fill_from_post(self, post_data):
    super(TraitUploadField, self).fill_from_post(post_data)

    if self.data.file is None:
        self.data = None
        return

    project = dao.get_project_by_id(self.owner.project_id)
    temporary_storage = self.files_helper.get_project_folder(project, self.files_helper.TEMP_FOLDER)

    file_name = None
    try:
        uq_name = utils.date2string(datetime.now(), True) + '_' + str(0)
        file_name = TEMPORARY_PREFIX + uq_name + '_' + self.data.filename
        file_name = os.path.join(temporary_storage, file_name)

        with open(file_name, 'wb') as file_obj:
            file_obj.write(self.data.file.read())
    except Exception as excep:
        # TODO: is this handled properly?
        self.files_helper.remove_files([file_name])
        excep.message = 'Could not continue: Invalid input files'
        raise excep

    if file_name:
        self.data = file_name
        self.owner.temporary_files.append(file_name)

def test_date2string(self):
    """
    Check the date2string method for various inputs.
    """
    date_input = datetime.datetime(1999, 3, 16, 18, 20, 33, 100000)
    self.assertEqual(date2string(date_input, complex_format=False), '03-16-1999',
                     "Did not get expected string from datetime conversion object.")
    custom_format = "%Y"
    self.assertEqual(date2string(date_input, date_format=custom_format), '1999',
                     "Did not get expected string from datetime conversion object.")
    self.assertEqual(date2string(date_input, complex_format=True), '1999-03-16,18-20-33.100000',
                     "Did not get expected string from datetime conversion object.")
    self.assertEqual("None", date2string(None), "Expected to return 'None' for None input.")

def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
    """
    Find all DataTypes (including the linked ones and the groups) relevant for the current project.
    In case of a problem, will return an empty list.
    """
    metadata_list = []
    dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)

    for dt in dt_list:
        # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
        data = {}
        is_group = False
        group_op = None
        dt_entity = dao.get_datatype_by_gid(dt.gid)
        if dt_entity is None:
            self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
            continue

        ## Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken
        if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
            is_group = True
            group_op = dt.parent_operation.operation_group

        # All these fields are necessary here for dynamic Tree levels.
        data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
        data[DataTypeMetaData.KEY_GID] = dt.gid
        data[DataTypeMetaData.KEY_NODE_TYPE] = dt.type
        data[DataTypeMetaData.KEY_STATE] = dt.state
        data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
        data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
        data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
        data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id
        data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
        data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
        data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
        data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
        data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

        # Operation related fields:
        operation_name = CommonDetails.compute_operation_name(
            dt.parent_operation.algorithm.algorithm_category.displayname,
            dt.parent_operation.algorithm.displayname)
        data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
        data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
        data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
        data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
        data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

        completion_date = dt.parent_operation.completion_date
        string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
        string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
        data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
        data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
        data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month
        data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

        metadata_list.append(DataTypeMetaData(data, dt.invalid))

    return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)

def populate_values(data_list, type_, category_key, complex_dt_attributes=None):
    """
    Populate meta-data fields for data_list (list of DataTypes).
    """
    values = []
    all_field_values = ''
    for value in data_list:
        # Here we only populate with DB data, actual
        # XML check will be done after select and submit.
        entity_gid = value[2]
        actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
        display_name = ''
        if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
            display_name = actual_entity[0].display_name
        display_name = display_name + ' - ' + (value[3] or "None ")
        if value[5]:
            display_name = display_name + ' - From: ' + str(value[5])
        else:
            display_name = display_name + utils.date2string(value[4])
        if value[6]:
            display_name = display_name + ' - ' + str(value[6])
        display_name = display_name + ' - ID:' + str(value[0])
        all_field_values = all_field_values + str(entity_gid) + ','
        values.append({ABCAdapter.KEY_NAME: display_name, ABCAdapter.KEY_VALUE: entity_gid})
        if complex_dt_attributes is not None:
            ### TODO apply filter on sub-attributes
            values[-1][ABCAdapter.KEY_ATTRIBUTES] = complex_dt_attributes
    if category_key is not None:
        category = dao.get_category_by_id(category_key)
        if (not category.display) and (not category.rawinput) and len(data_list) > 1:
            values.insert(0, {ABCAdapter.KEY_NAME: "All", ABCAdapter.KEY_VALUE: all_field_values[:-1]})
    return values

def store(self, view_model, fname=None):
    # type: (ViewModel, str) -> str
    """
    Completely store any ViewModel object to the directory specified by self.base_dir.
    Works recursively for view models that are serialized in multiple files
    (e.g. SimulatorAdapterModel).
    """
    if fname is None:
        h5_path = self.path_for_has_traits(type(view_model), view_model.gid)
    else:
        h5_path = os.path.join(self.base_dir, fname)

    with ViewModelH5(h5_path, view_model) as h5_file:
        h5_file.store(view_model)
        h5_file.type.store(self.get_class_path(view_model))
        h5_file.create_date.store(date2string(datetime.now()))
        if hasattr(view_model, "generic_attributes"):
            h5_file.store_generic_attributes(view_model.generic_attributes)
        else:
            # For HasTraits not inheriting from ViewModel (e.g. Linear)
            h5_file.store_generic_attributes(GenericAttributes())

        references = h5_file.gather_references()
        for trait_attr, gid in references:
            if not gid:
                continue
            model_attr = getattr(view_model, trait_attr.field_name)
            if isinstance(gid, list):
                for idx, sub_gid in enumerate(gid):
                    self.store(model_attr[idx])
            else:
                self.store(model_attr)
    return h5_path

def _prepare_display_name(self, value):
    # TODO remove duplicate with TraitedDataTypeSelectField
    """
    Build the display name for a single DataType row (one result of the
    get_values_of_datatype query). Here we only populate with DB data;
    the actual check is done after select and submit.
    """
    entity_gid = value[2]
    actual_entity = dao.get_generic_entity(self.datatype_index, entity_gid, "gid")
    display_name = actual_entity[0].display_name
    display_name += ' - ' + (value[3] or "None ")
    if value[5]:
        display_name += ' - From: ' + str(value[5])
    else:
        display_name += utils.date2string(value[4])
    if value[6]:
        display_name += ' - ' + str(value[6])
    display_name += ' - ID:' + str(value[0])
    return display_name

def _populate_values(data_list, type_, category_key):
    """
    Populate meta-data fields for data_list (list of DataTypes).

    Private method, to be called recursively. It will receive a list of Attributes,
    and it will populate the 'options' entry with data references from DB.
    """
    values = []
    all_field_values = ''
    for value in data_list:
        # Here we only populate with DB data, actual
        # XML check will be done after select and submit.
        entity_gid = value[2]
        actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
        display_name = ''
        if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
            display_name = actual_entity[0].display_name
        display_name += ' - ' + (value[3] or "None ")
        if value[5]:
            display_name += ' - From: ' + str(value[5])
        else:
            display_name += utils.date2string(value[4])
        if value[6]:
            display_name += ' - ' + str(value[6])
        display_name += ' - ID:' + str(value[0])
        all_field_values += str(entity_gid) + ','
        values.append({KEY_NAME: display_name, KEY_VALUE: entity_gid})
    if category_key is not None:
        category = dao.get_category_by_id(category_key)
        if not category.display and not category.rawinput and len(data_list) > 1:
            values.insert(0, {KEY_NAME: "All", KEY_VALUE: all_field_values[:-1]})
    return values

def populate_values(data_list, type_, category_key, complex_dt_attributes=None):
    """
    Populate meta-data fields for data_list (list of DataTypes).
    """
    values = []
    all_field_values = ''
    for value in data_list:
        # Here we only populate with DB data, actual
        # XML check will be done after select and submit.
        entity_gid = value[2]
        actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
        display_name = ''
        if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
            display_name = actual_entity[0].display_name
        # Guard against a missing subject (as the sibling variants do), to avoid concatenating None
        display_name = display_name + ' - ' + (value[3] or "None ")
        if value[5]:
            display_name = display_name + ' - From: ' + str(value[5])
        else:
            display_name = display_name + utils.date2string(value[4])
        if value[6]:
            display_name = display_name + ' - ' + str(value[6])
        display_name = display_name + ' - ID:' + str(value[0])
        all_field_values = all_field_values + str(entity_gid) + ','
        values.append({ABCAdapter.KEY_NAME: display_name, ABCAdapter.KEY_VALUE: entity_gid})
        if complex_dt_attributes is not None:
            ### TODO apply filter on sub-attributes
            values[-1][ABCAdapter.KEY_ATTRIBUTES] = complex_dt_attributes
    if category_key is not None:
        category = dao.get_category_by_id(category_key)
        if (not category.display) and (not category.rawinput) and len(data_list) > 1:
            values.insert(0, {ABCAdapter.KEY_NAME: "All", ABCAdapter.KEY_VALUE: all_field_values[:-1]})
    return values

def update_local_connectivity_metadata(file_path):
    with LocalConnectivityH5(file_path) as f:
        f.storage_manager.set_metadata({'Shape': "(16384, 16384)",
                                        'format': "csc",
                                        'dtype': "<f8"},
                                       "/matrix")
        f.storage_manager.set_metadata({'cutoff': 40.0,
                                        'state': "RAW_DATA",
                                        'subject': "John Doe",
                                        'user_tag_1': "srf_16k",
                                        'user_tag_2': "",
                                        'user_tag_3': "",
                                        'user_tag_4': "",
                                        'user_tag_5': "",
                                        'type': "",
                                        'create_date': date2string(datetime.now()),
                                        'visible': True,
                                        'is_nan': False,
                                        'gid': UUID('3e551cbd-47ca-11e4-9f21-3c075431bf56').urn,
                                        'surface': UUID('10467c4f-d487-4186-afa6-d9b1fd8383d8').urn})

def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
    """
    Find all DataTypes (including the linked ones and the groups) relevant for the current project.
    In case of a problem, will return an empty list.
    """
    metadata_list = []
    dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)

    for dt in dt_list:
        # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
        data = {}
        is_group = False
        group_op = None
        dt_entity = dao.get_datatype_by_gid(dt.gid)
        if dt_entity is None:
            self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
            continue

        # Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken
        if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
            is_group = True
            group_op = dt.parent_operation.operation_group

        # All these fields are necessary here for dynamic Tree levels.
        data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
        data[DataTypeMetaData.KEY_GID] = dt.gid
        data[DataTypeMetaData.KEY_NODE_TYPE] = dt.display_type
        data[DataTypeMetaData.KEY_STATE] = dt.state
        data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
        data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
        data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
        data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id
        data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
        data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
        data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
        data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
        data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

        # Operation related fields:
        operation_name = CommonDetails.compute_operation_name(
            dt.parent_operation.algorithm.algorithm_category.displayname,
            dt.parent_operation.algorithm.displayname)
        data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
        data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
        data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
        data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
        data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

        completion_date = dt.parent_operation.completion_date
        string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
        string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
        data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
        data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
        data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month
        data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

        metadata_list.append(DataTypeMetaData(data, dt.invalid))

    return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)

def initiate_operation(self, current_user, project_id, adapter_instance, temporary_storage, visible=True, **kwargs):
    """
    Gets the parameters of the computation from the previous inputs form,
    and launches a computation (on the cluster or locally).

    Invoke custom method on an Adapter Instance. Make sure when the
    operation has finished that the correct results are stored into DB.
    """
    if not isinstance(adapter_instance, ABCAdapter):
        self.logger.warning("Inconsistent Adapter Class:" + str(adapter_instance.__class__))
        raise LaunchException("Developer Exception!!")

    # Prepare Files parameters
    files = {}
    kw2 = copy(kwargs)
    for i, j in six.iteritems(kwargs):
        if isinstance(j, FieldStorage) or isinstance(j, Part):
            files[i] = j
            del kw2[i]

    temp_files = {}
    try:
        for i, j in six.iteritems(files):
            if j.file is None:
                kw2[i] = None
                continue
            uq_name = utils.date2string(datetime.now(), True) + '_' + str(i)
            # We have to add original file name to end, in case file processing
            # involves file extension reading
            file_name = TEMPORARY_PREFIX + uq_name + '_' + j.filename
            file_name = os.path.join(temporary_storage, file_name)
            kw2[i] = file_name
            temp_files[i] = file_name
            with open(file_name, 'wb') as file_obj:
                file_obj.write(j.file.read())
            self.logger.debug("Will store file:" + file_name)
        kwargs = kw2
    except Exception as excep:
        self._handle_exception(excep, temp_files, "Could not launch operation: invalid input files!")

    ### Store Operation entity.
    algo = adapter_instance.stored_adapter
    algo_category = dao.get_category_by_id(algo.fk_category)

    operations = self.prepare_operations(current_user.id, project_id, algo, algo_category,
                                         {}, visible, **kwargs)[0]

    if isinstance(adapter_instance, ABCSynchronous):
        if len(operations) > 1:
            raise LaunchException("Synchronous operations are not supporting ranges!")
        if len(operations) < 1:
            self.logger.warning("No operation was defined")
            raise LaunchException("Invalid empty Operation!!!")
        return self.initiate_prelaunch(operations[0], adapter_instance, temp_files, **kwargs)
    else:
        return self._send_to_cluster(operations, adapter_instance, current_user.username)

def _capture_operation_results(self, result, user_tag=None):
    """
    After an operation has finished, make sure the results are stored
    in DB storage and the correct meta-data and IDs are set.
    """
    results_to_store = []
    data_type_group_id = None
    operation = dao.get_operation_by_id(self.operation_id)
    if operation.user_group is None or len(operation.user_group) == 0:
        operation.user_group = date2string(datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
        operation = dao.store_entity(operation)
    if self._is_group_launch():
        data_type_group_id = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group).id

    # All entities will have the same subject and state
    subject = self.meta_data[DataTypeMetaData.KEY_SUBJECT]
    state = self.meta_data[DataTypeMetaData.KEY_STATE]
    burst_reference = None
    if DataTypeMetaData.KEY_BURST in self.meta_data:
        burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]
    perpetuated_identifier = None
    if DataTypeMetaData.KEY_TAG_1 in self.meta_data:
        perpetuated_identifier = self.meta_data[DataTypeMetaData.KEY_TAG_1]

    for res in result:
        if res is None:
            continue
        res.subject = str(subject)
        res.state = state
        res.fk_parent_burst = burst_reference
        res.fk_from_operation = self.operation_id
        res.framework_metadata = self.meta_data
        if not res.user_tag_1:
            res.user_tag_1 = user_tag if user_tag is not None else perpetuated_identifier
        else:
            res.user_tag_2 = user_tag if user_tag is not None else perpetuated_identifier
        res.fk_datatype_group = data_type_group_id

        ## Compute size on disk, in case file-storage is used
        if hasattr(res, 'storage_path') and hasattr(res, 'get_storage_file_name'):
            associated_file = os.path.join(res.storage_path, res.get_storage_file_name())
            res.close_file()
            res.disk_size = self.file_handler.compute_size_on_disk(associated_file)
        res = dao.store_entity(res)
        # Write metaData
        res.persist_full_metadata()
        results_to_store.append(res)

    del result[0:len(result)]
    result.extend(results_to_store)

    if len(result) and self._is_group_launch():
        ## Update the operation group name
        operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
        operation_group.fill_operationgroup_name(result[0].type)
        dao.store_entity(operation_group)

    return 'Operation ' + str(self.operation_id) + ' has finished.', len(results_to_store)

def to_dict(self, excludes=['id']):
    """
    For a model entity, return an equivalent dictionary.
    """
    dict_equivalent = {}
    for key in self.__dict__:
        if '_sa_' not in key[:5] and key not in excludes:
            if isinstance(self.__dict__[key], datetime.datetime):
                dict_equivalent[key] = date2string(self.__dict__[key])
            else:
                dict_equivalent[key] = self.__dict__[key]
    return self.__class__.__name__, dict_equivalent

def fill_from_datatype(self, datatype_result, parent_burst):
    """
    Fill current dictionary with information from a loaded DB DataType.

    :param datatype_result: DB loaded DataType
    :param parent_burst: Burst entity in which current DataType was generated
    """
    self.gid = datatype_result.gid
    self.data_type_id = datatype_result.id
    self.data_state = datatype_result.state
    self.datatype_title = datatype_result.display_name
    self.subject = datatype_result.subject
    self.data_type = datatype_result.type
    self.datatype_tag_1 = datatype_result.user_tag_1
    self.datatype_tag_2 = datatype_result.user_tag_2
    self.datatype_tag_3 = datatype_result.user_tag_3
    self.datatype_tag_4 = datatype_result.user_tag_4
    self.datatype_tag_5 = datatype_result.user_tag_5
    self.datatype_size = datatype_result.disk_size
    self.author = datatype_result.parent_operation.user.username

    parent_algorithm = datatype_result.parent_operation.algorithm
    operation_name = self.compute_operation_name(parent_algorithm.algorithm_category.displayname,
                                                 parent_algorithm.displayname)
    self.operation_type = operation_name

    create_date_str = ''
    if datatype_result.parent_operation.completion_date is not None:
        create_date_str = date2string(datatype_result.parent_operation.completion_date)
    self.create_date = create_date_str

    if parent_burst is not None:
        self.burst_name = parent_burst.name
    else:
        self.burst_name = ''

    ### Populate Group attributes
    if isinstance(datatype_result, DataTypeGroup):
        self.count = datatype_result.count_results
        self.operation_group_name = datatype_result.parent_operation.operation_group.name
        self.operation_label = datatype_result.parent_operation.operation_group.name
        self.operation_group_id = datatype_result.parent_operation.operation_group.id
    else:
        self.operation_label = datatype_result.parent_operation.user_group

    ### Populate Scientific attributes
    if hasattr(datatype_result, 'summary_info') and datatype_result.summary_info is not None:
        self.add_scientific_fields(datatype_result.summary_info)

def _capture_operation_results(self, result):
    """
    After an operation has finished, make sure the results are stored
    in DB storage and the correct meta-data and IDs are set.
    """
    data_type_group_id = None
    operation = dao.get_operation_by_id(self.operation_id)
    if operation.user_group is None or len(operation.user_group) == 0:
        operation.user_group = date2string(datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
        operation = dao.store_entity(operation)
    if self._is_group_launch():
        data_type_group_id = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group).id

    count_stored = 0
    if result is None:
        return "", count_stored

    group_type = None  # In case of a group, the first not-none type is sufficient to memorize here
    for res in result:
        if res is None:
            continue
        if not res.fixed_generic_attributes:
            res.fill_from_generic_attributes(self.generic_attributes)
        res.fk_from_operation = self.operation_id
        res.fk_datatype_group = data_type_group_id

        associated_file = h5.path_for_stored_index(res)
        if os.path.exists(associated_file):
            if not res.fixed_generic_attributes:
                with H5File.from_file(associated_file) as f:
                    f.store_generic_attributes(self.generic_attributes)
            # Compute size on disk, in case file-storage is used
            res.disk_size = self.storage_interface.compute_size_on_disk(associated_file)
        dao.store_entity(res)
        res.after_store()
        group_type = res.type
        count_stored += 1

    if count_stored > 0 and self._is_group_launch():
        # Update the operation group name
        operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
        operation_group.fill_operationgroup_name(group_type)
        dao.store_entity(operation_group)

    return 'Operation ' + str(self.operation_id) + ' has finished.', count_stored

def _capture_operation_results(self, result):
    """
    After an operation has finished, make sure the results are stored
    in DB storage and the correct meta-data and IDs are set.
    """
    data_type_group_id = None
    operation = dao.get_operation_by_id(self.operation_id)
    if operation.user_group is None or len(operation.user_group) == 0:
        operation.user_group = date2string(datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
        operation = dao.store_entity(operation)
    if self._is_group_launch():
        data_type_group_id = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group).id

    burst_reference = None
    if DataTypeMetaData.KEY_BURST in self.meta_data:
        burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]

    count_stored = 0
    group_type = None  # In case of a group, the first not-none type is sufficient to memorize here
    for res in result:
        if res is None:
            continue
        res.subject = self.generic_attributes.subject
        res.state = self.generic_attributes.state
        res.fk_parent_burst = burst_reference
        res.fk_from_operation = self.operation_id
        res.framework_metadata = self.meta_data
        res.user_tag_1 = self.generic_attributes.user_tag_1
        res.user_tag_2 = self.generic_attributes.user_tag_2
        res.fk_datatype_group = data_type_group_id

        # Compute size on disk, in case file-storage is used
        associated_file = h5.path_for_stored_index(res)
        if os.path.exists(associated_file):
            res.disk_size = self.file_handler.compute_size_on_disk(associated_file)
            with H5File.from_file(associated_file) as f:
                f.store_generic_attributes(self.generic_attributes)

        dao.store_entity(res)
        group_type = res.type
        count_stored += 1

    if count_stored > 0 and self._is_group_launch():
        # Update the operation group name
        operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
        operation_group.fill_operationgroup_name(group_type)
        dao.store_entity(operation_group)

    return 'Operation ' + str(self.operation_id) + ' has finished.', count_stored

def initiate_operation(self, current_user, project_id, adapter_instance, temporary_storage,
                       method_name=ABCAdapter.LAUNCH_METHOD, visible=True, **kwargs):
    """
    Gets the parameters of the computation from the previous inputs form,
    and launches a computation (on the cluster or locally).

    Invoke custom method on an Adapter Instance. Make sure when the
    operation has finished that the correct results are stored into DB.
    """
    if not isinstance(adapter_instance, ABCAdapter):
        self.logger.warning("Inconsistent Adapter Class:" + str(adapter_instance.__class__))
        raise LaunchException("Developer Exception!!")

    # Prepare Files parameters
    files = {}
    kw2 = copy(kwargs)
    for i, j in kwargs.iteritems():
        if isinstance(j, FieldStorage) or isinstance(j, Part):
            files[i] = j
            del kw2[i]

    temp_files = {}
    try:
        for i, j in files.iteritems():
            if j.file is None:
                kw2[i] = None
                continue
            uq_name = utils.date2string(datetime.now(), True) + '_' + str(i)
            # We have to add original file name to end, in case file processing
            # involves file extension reading
            file_name = TEMPORARY_PREFIX + uq_name + '_' + j.filename
            file_name = os.path.join(temporary_storage, file_name)
            kw2[i] = file_name
            temp_files[i] = file_name
            with open(file_name, 'wb') as file_obj:
                file_obj.write(j.file.read())
            self.logger.debug("Will store file:" + file_name)
        kwargs = kw2
    except Exception, excep:
        self._handle_exception(excep, temp_files, "Could not launch operation: invalid input files!")

def fill_operationgroup_name(self, entities_in_group):
    """
    Display name for UI.
    """
    new_name = "of " + entities_in_group + " varying "
    if self.range1 is not None:
        new_name += json.loads(self.range1)[0]
    if self.range2 is not None:
        new_name += " x " + json.loads(self.range2)[0]
    if self.range3 is not None:
        new_name += " x " + json.loads(self.range3)[0]
    new_name += " - " + date2string(datetime.datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
    self.name = new_name

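# Illustration of the generated group name. The range payloads below are
# hypothetical, but per the code above each range is a JSON list whose first
# element names the varied parameter, so two active ranges produce a
# "param1 x param2" label with a timestamp appended (its exact shape depends
# on LESS_COMPLEX_TIME_FORMAT).
group = OperationGroup()  # hypothetical instance
group.range1 = json.dumps(["conduction_speed", [1.0, 2.0, 3.0]])
group.range2 = json.dumps(["coupling.a", [0.1, 0.2]])
group.fill_operationgroup_name("6")
# group.name ~= "of 6 varying conduction_speed x coupling.a - <timestamp>"
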
def _prepare_dt_display_name(self, dt_index, dt):
    # dt is a result of the get_values_of_datatype function
    db_dt = dao.get_generic_entity(dt_index, dt[2], "gid")
    display_name = db_dt[0].display_name
    display_name += ' - ' + (dt[3] or "None ")  # Subject
    if dt[5]:
        display_name += ' - From: ' + str(dt[5])
    else:
        display_name += utils.date2string(dt[4])
    if dt[6]:
        display_name += ' - ' + str(dt[6])
    display_name += ' - ID:' + str(dt[0])
    return display_name

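# To make the tuple indices concrete: assuming the layout implied above and in
# the sibling populate_values helpers -- (id, _, gid, subject, completion_date,
# group, group_name) -- a group-less datatype yields a label like the sketch
# below. All concrete values here are invented.
dt = (42, None, 'a1b2c3-gid', 'John Doe', datetime(2024, 1, 1), None, None)
# _prepare_dt_display_name(dt_index, dt) would then return roughly:
#   "TimeSeriesRegion - John Doe - 2024-01-01,00-00-00.000000 - ID:42"
# where the leading part is the display_name of the entity loaded by gid.
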
def to_dict(self, excludes=None):
    """
    For a model entity, return an equivalent dictionary.
    """
    if excludes is None:
        excludes = ["id"]
    dict_equivalent = {}
    for key in self.__dict__:
        if '_sa_' not in key[:5] and key not in excludes:
            if isinstance(self.__dict__[key], datetime.datetime):
                dict_equivalent[key] = date2string(self.__dict__[key])
            else:
                dict_equivalent[key] = self.__dict__[key]
    return self.__class__.__name__, dict_equivalent

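# Runnable sketch of the (class_name, dict) tuple for a toy entity. FakeEntity
# is a stand-in for a SQLAlchemy-mapped model, whose internal _sa_instance_state
# attribute is what the "'_sa_' not in key[:5]" test above filters out.
class FakeEntity(object):
    to_dict = to_dict  # reuse the function above as a method

    def __init__(self):
        self.id = 7
        self.name = "demo"
        self.create_date = datetime.datetime(2024, 1, 1)

name, payload = FakeEntity().to_dict()
# name == 'FakeEntity'
# payload == {'name': 'demo', 'create_date': '2024-01-01,00-00-00.000000'}
# (the date string assumes date2string's default complex format, as in the tests above)
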
def fill_from_datatype(self, datatype_result, parent_burst):
    """
    Fill current dictionary with information from a loaded DB DataType.

    :param datatype_result: DB loaded DataType
    :param parent_burst: Burst entity in which current DataType was generated
    """
    self.gid = datatype_result.gid
    self.data_type_id = datatype_result.id
    self.data_state = datatype_result.state
    self.datatype_title = datatype_result.display_name
    self.subject = datatype_result.subject
    self.data_type = datatype_result.type
    self.datatype_tag_1 = datatype_result.user_tag_1
    self.datatype_tag_2 = datatype_result.user_tag_2
    self.datatype_tag_3 = datatype_result.user_tag_3
    self.datatype_tag_4 = datatype_result.user_tag_4
    self.datatype_tag_5 = datatype_result.user_tag_5
    self.datatype_size = datatype_result.disk_size
    self.author = datatype_result.parent_operation.user.username

    parent_algorithm = datatype_result.parent_operation.algorithm
    operation_name = self.compute_operation_name(parent_algorithm.algorithm_category.displayname,
                                                 parent_algorithm.displayname)
    self.operation_type = operation_name

    create_date_str = ''
    if datatype_result.parent_operation.completion_date is not None:
        create_date_str = date2string(datatype_result.parent_operation.completion_date)
    self.create_date = create_date_str

    if parent_burst is not None:
        self.burst_name = parent_burst.name
    else:
        self.burst_name = ''

    ### Populate Group attributes
    if isinstance(datatype_result, model.DataTypeGroup):
        self.count = datatype_result.count_results
        self.operation_group_name = datatype_result.parent_operation.operation_group.name
        self.operation_label = datatype_result.parent_operation.operation_group.name
        self.operation_group_id = datatype_result.parent_operation.operation_group.id
    else:
        self.operation_label = datatype_result.parent_operation.user_group

    ### Populate Scientific attributes
    if hasattr(datatype_result, 'summary_info') and datatype_result.summary_info is not None:
        self.add_scientific_fields(datatype_result.summary_info)

def __datatype2metastructure(row, dt_ids):
    """
    Convert a row of data retrieved from DB into a DataTypeMetaData object.
    """
    data = {}
    is_group = False
    group = None
    if row[7] is not None and row[7] and row[14] in dt_ids:
        is_group = True
        group = dao.get_generic_entity(model.OperationGroup, row[7])
        if group and len(group):
            group = group[0]
        else:
            is_group = False
    datatype_group = None
    if row[14] is not None and row[14] in dt_ids:
        datatype_group = dao.get_datatype_by_id(row[14])

    dt_entity = dao.get_datatype_by_gid(row[9])
    data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name

    ## All these fields are necessary here for dynamic Tree levels.
    data[DataTypeMetaData.KEY_NODE_TYPE] = datatype_group.type if datatype_group is not None else row[0]
    data[DataTypeMetaData.KEY_STATE] = row[1]
    data[DataTypeMetaData.KEY_SUBJECT] = str(row[2])
    operation_name = CommonDetails.compute_operation_name(row[3], row[4], row[5])
    data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
    data[DataTypeMetaData.KEY_AUTHOR] = row[6]
    data[DataTypeMetaData.KEY_OPERATION_TAG] = group.name if is_group else row[8]
    data[DataTypeMetaData.KEY_OP_GROUP_ID] = group.id if is_group else None
    data[DataTypeMetaData.KEY_GID] = datatype_group.gid if datatype_group is not None else row[9]
    data[DataTypeMetaData.KEY_DATE] = date2string(row[10]) if (row[10] is not None) else ''
    data[DataTypeMetaData.KEY_DATATYPE_ID] = datatype_group.id if datatype_group is not None else row[11]
    data[DataTypeMetaData.KEY_LINK] = row[12]
    data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = row[5]

    date_string = row[10].strftime(MONTH_YEAR_FORMAT) if row[10] is not None else ""
    data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = date_string
    date_string = row[10].strftime(DAY_MONTH_YEAR_FORMAT) if row[10] is not None else ""
    data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = date_string

    data[DataTypeMetaData.KEY_BURST] = row[15] if row[15] is not None else '-None-'
    data[DataTypeMetaData.KEY_TAG_1] = row[16] if row[16] else ''
    data[DataTypeMetaData.KEY_TAG_2] = row[17] if row[17] else ''
    data[DataTypeMetaData.KEY_TAG_3] = row[18] if row[18] else ''
    data[DataTypeMetaData.KEY_TAG_4] = row[19] if row[19] else ''
    data[DataTypeMetaData.KEY_TAG_5] = row[20] if row[20] else ''
    data[DataTypeMetaData.KEY_RELEVANCY] = row[21] > 0

    invalid = bool(row[13])
    return DataTypeMetaData(data, invalid)

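# Reading aid: the `row` indices above decode as follows (inferred purely from
# how the function consumes them):
#  0: datatype type             8: operation user_group    16-20: user tags 1-5
#  1: state                     9: datatype gid            21: relevancy counter
#  2: subject                  10: completion date
#  3: category display name    11: datatype id
#  4: group display name       12: link flag
#  5: algorithm name           13: invalid flag
#  6: author username          14: datatype-group id
#  7: operation-group id       15: burst name
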
def store_generic_attributes(self, generic_attributes):
    # type: (GenericAttributes) -> None
    # write_metadata creation time, serializer class name, etc.
    self.create_date.store(date2string(datetime.now()))
    self.generic_attributes.fill_from(generic_attributes)
    self.invalid.store(self.generic_attributes.invalid)
    self.is_nan.store(self.generic_attributes.is_nan)
    self.subject.store(self.generic_attributes.subject)
    self.state.store(self.generic_attributes.state)
    self.type.store(self.generic_attributes.type)
    self.user_tag_1.store(self.generic_attributes.user_tag_1)
    self.user_tag_2.store(self.generic_attributes.user_tag_2)
    self.user_tag_3.store(self.generic_attributes.user_tag_3)
    self.user_tag_4.store(self.generic_attributes.user_tag_4)
    self.user_tag_5.store(self.generic_attributes.user_tag_5)
    self.visible.store(self.generic_attributes.visible)

def _populate_values(data_list, type_, category_key, complex_dt_attributes=None):
    """
    Populate meta-data fields for data_list (list of DataTypes).

    Private method, to be called recursively. It will receive a list of Attributes,
    and it will populate the 'options' entry with data references from DB.
    """
    values = []
    all_field_values = []
    for id_, _, entity_gid, subject, completion_date, group, gr_name, tag1 in data_list:
        # Here we only populate with DB data, actual
        # XML check will be done after select and submit.
        actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
        display_name = ''
        if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
            display_name = actual_entity[0].display_name
        display_name += ' - ' + (subject or "None ")
        if group:
            display_name += ' - From: ' + str(group)
        else:
            display_name += utils.date2string(completion_date)
        if gr_name:
            display_name += ' - ' + str(gr_name)
        display_name += ' - ID:' + str(id_)
        all_field_values.append(str(entity_gid))
        values.append({KEY_NAME: display_name, KEY_VALUE: entity_gid})
        if complex_dt_attributes is not None:
            ### TODO apply filter on sub-attributes
            # This is the copy of complex dtype attributes on all db options
            values[-1][KEY_ATTRIBUTES] = complex_dt_attributes
    if category_key is not None:
        category = dao.get_category_by_id(category_key)
        if not category.display and not category.rawinput and len(data_list) > 1:
            values.insert(0, {KEY_NAME: "All", KEY_VALUE: ','.join(all_field_values)})
    return values

def fill_operationgroup_name(self, entities_in_group):
    """
    Display name for UI.
    """
    new_name = "of " + entities_in_group + " varying "
    if self.range1 is not None:
        range_param1 = RangeParameter.from_json(self.range1)
        new_name += range_param1.name
    if self.range2 is not None:
        range_param2 = RangeParameter.from_json(self.range2)
        new_name += " x " + range_param2.name
    if self.range3 is not None:
        range_param3 = RangeParameter.from_json(self.range3)
        new_name += " x " + range_param3.name
    new_name += " - " + date2string(datetime.datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
    self.name = new_name

def _serialize_value(self, value):
    """
    Take a value which will be stored as metadata and apply some transformation
    if necessary.

    :param value: value which is planned to be stored
    :returns: value to be stored
    """
    if value is None:
        return ''
    # Transform boolean to string and prefix it
    if isinstance(value, bool):
        return self.BOOL_VALUE_PREFIX + str(value)
    # Transform date to string and prepend the prefix
    elif isinstance(value, datetime):
        return self.DATETIME_VALUE_PREFIX + utils.date2string(value, date_format=self.DATE_TIME_FORMAT)
    else:
        return value

def _store(self, file, view_model):
    file.store(view_model)
    file.type.store(self.get_class_path(view_model))
    file.create_date.store(date2string(datetime.now()))
    if hasattr(view_model, "generic_attributes"):
        file.store_generic_attributes(view_model.generic_attributes)
    else:
        # For HasTraits not inheriting from ViewModel (e.g. Linear)
        file.store_generic_attributes(GenericAttributes())

    references = file.gather_references()
    for trait_attr, gid in references:
        if not gid:
            continue
        model_attr = getattr(view_model, trait_attr.field_name)
        if isinstance(gid, list):
            for idx, sub_gid in enumerate(gid):
                self.store(model_attr[idx])
        else:
            self.store(model_attr)

def store_generic_attributes(self, generic_attributes, create=True):
    # type: (GenericAttributes, bool) -> None
    # write_metadata creation time, serializer class name, etc.
    if create:
        self.create_date.store(date2string(datetime.now()))
    self.generic_attributes.fill_from(generic_attributes)
    self.invalid.store(self.generic_attributes.invalid)
    self.is_nan.store(self.generic_attributes.is_nan)
    self.subject.store(self.generic_attributes.subject)
    self.state.store(self.generic_attributes.state)
    self.user_tag_1.store(self.generic_attributes.user_tag_1)
    self.user_tag_2.store(self.generic_attributes.user_tag_2)
    self.user_tag_3.store(self.generic_attributes.user_tag_3)
    self.user_tag_4.store(self.generic_attributes.user_tag_4)
    self.user_tag_5.store(self.generic_attributes.user_tag_5)
    self.operation_tag.store(self.generic_attributes.operation_tag)
    self.visible.store(self.generic_attributes.visible)
    if self.generic_attributes.parent_burst is not None:
        self.parent_burst.store(uuid.UUID(self.generic_attributes.parent_burst))

def initiate_operation(self, current_user, project_id, adapter_instance, temporary_storage,
                       method_name=ABCAdapter.LAUNCH_METHOD, visible=True, **kwargs):
    """
    Gets the parameters of the computation from the previous inputs form,
    and launches a computation (on the cluster or locally).

    Invoke custom method on an Adapter Instance. Make sure when the
    operation has finished that the correct results are stored into DB.
    """
    if not isinstance(adapter_instance, ABCAdapter):
        self.logger.warning("Inconsistent Adapter Class:" + str(adapter_instance.__class__))
        raise LaunchException("Developer Exception!!")

    # Prepare Files parameters
    files = {}
    kw2 = copy(kwargs)
    for i, j in kwargs.iteritems():
        if isinstance(j, FieldStorage) or isinstance(j, Part):
            files[i] = j
            del kw2[i]

    temp_files = {}
    try:
        for i, j in files.iteritems():
            if j.file is None:
                kw2[i] = None
                continue
            uq_name = utils.date2string(datetime.now(), True) + '_' + str(i)
            # We have to add original file name to end, in case file processing
            # involves file extension reading
            file_name = TEMPORARY_PREFIX + uq_name + '_' + j.filename
            file_name = os.path.join(temporary_storage, file_name)
            kw2[i] = file_name
            temp_files[i] = file_name
            file_obj = open(file_name, 'wb')
            file_obj.write(j.file.read())
            file_obj.close()
            self.logger.debug("Will store file:" + file_name)
        kwargs = kw2
    except Exception, excep:
        self._handle_exception(excep, temp_files, "Could not launch operation: invalid input files!")

def _serialize_value(self, value):
    """
    Take a value which will be stored as metadata and apply some transformation
    if necessary.

    :param value: value which is planned to be stored
    :returns: value to be stored
    """
    if value is None:
        return ''
    # Force unicode strings to simple strings.
    if isinstance(value, unicode):
        return str(value)
    # Transform boolean to string and prefix it
    elif isinstance(value, bool):
        return self.BOOL_VALUE_PREFIX + str(value)
    # Transform date to string and prepend the prefix
    elif isinstance(value, datetime):
        return self.DATETIME_VALUE_PREFIX + utils.date2string(value, date_format=self.DATE_TIME_FORMAT)
    else:
        return value

def _prepare_upload_post_data(self, form, post_data, project_id):
    for form_field in form.trait_fields:
        if isinstance(form_field, TraitUploadField) and form_field.name in post_data:
            field = post_data[form_field.name]
            file_name = None
            if hasattr(field, 'file') and field.file is not None:
                project = dao.get_project_by_id(project_id)
                temporary_storage = self.file_helper.get_project_folder(project, self.file_helper.TEMP_FOLDER)
                try:
                    uq_name = utils.date2string(datetime.now(), True) + '_' + str(0)
                    file_name = TEMPORARY_PREFIX + uq_name + '_' + field.filename
                    file_name = os.path.join(temporary_storage, file_name)

                    with open(file_name, 'wb') as file_obj:
                        file_obj.write(field.file.read())
                except Exception as excep:
                    # TODO: is this handled properly?
                    self.file_helper.remove_files([file_name])
                    excep.message = 'Could not continue: Invalid input files'
                    raise excep
            post_data[form_field.name] = file_name

def _serialize_value(value):
    """
    Take a value which will be stored as meta-data and apply some transformation
    if necessary.

    :param value: value which is planned to be stored
    :returns: value to be stored

    NOTE: this method was part of the TVB 1.0 hdf5storage manager, but since this
    script needs to be independent of the current storage manager, we duplicate it here.
    """
    if value is None:
        return ''
    # Force unicode strings to simple strings.
    if isinstance(value, unicode):
        return str(value)
    # Transform boolean to string and prefix it
    elif isinstance(value, bool):
        return BOOL_VALUE_PREFIX + str(value)
    # Transform date to string and prepend the prefix
    elif isinstance(value, datetime):
        return DATETIME_VALUE_PREFIX + date2string(value, date_format=DATE_TIME_FORMAT)
    else:
        return value

def _capture_operation_results(self, result, user_tag=None):
    """
    After an operation has finished, make sure the results are stored
    in DB storage and the correct meta-data and IDs are set.
    """
    results_to_store = []
    data_type_group_id = None
    operation = dao.get_operation_by_id(self.operation_id)
    if operation.user_group is None or len(operation.user_group) == 0:
        operation.user_group = date2string(datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
        operation = dao.store_entity(operation)
    if self._is_group_launch():
        data_type_group_id = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group).id

    # All entities will have the same subject and state
    subject = self.meta_data[DataTypeMetaData.KEY_SUBJECT]
    state = self.meta_data[DataTypeMetaData.KEY_STATE]
    burst_reference = None
    if DataTypeMetaData.KEY_BURST in self.meta_data:
        burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]
    perpetuated_identifier = None
    if DataTypeMetaData.KEY_TAG_1 in self.meta_data:
        perpetuated_identifier = self.meta_data[DataTypeMetaData.KEY_TAG_1]

    for res in result:
        if res is None:
            continue
        res.subject = str(subject)
        res.state = state
        res.fk_parent_burst = burst_reference
        res.fk_from_operation = self.operation_id
        res.framework_metadata = self.meta_data
        if not res.user_tag_1:
            res.user_tag_1 = user_tag if user_tag is not None else perpetuated_identifier
        else:
            res.user_tag_2 = user_tag if user_tag is not None else perpetuated_identifier
        res.fk_datatype_group = data_type_group_id

        ## Compute size on disk, in case file-storage is used
        if hasattr(res, 'storage_path') and hasattr(res, 'get_storage_file_name'):
            associated_file = os.path.join(res.storage_path, res.get_storage_file_name())
            res.close_file()
            res.disk_size = self.file_handler.compute_size_on_disk(associated_file)
        res = dao.store_entity(res)
        # Write metaData
        res.persist_full_metadata()
        results_to_store.append(res)

    del result[0:len(result)]
    result.extend(results_to_store)

    if len(result) and self._is_group_launch():
        ## Update the operation group name
        operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
        operation_group.fill_operationgroup_name(result[0].type)
        dao.store_entity(operation_group)

    return 'Operation ' + str(self.operation_id) + ' has finished.', len(results_to_store)