class ProjectService: """ Services layer for Project entities. """ def __init__(self): self.logger = get_logger(__name__) self.storage_interface = StorageInterface() def store_project(self, current_user, is_create, selected_id, **data): """ We want to create/update a project entity. """ # Validate Unique Name new_name = data["name"] if len(new_name) < 1: raise ProjectServiceException("Invalid project name!") projects_no = dao.count_projects_for_name(new_name, selected_id) if projects_no > 0: err = {'name': 'Please choose another name, this one is used!'} raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err) started_operations = dao.get_operation_numbers(selected_id)[1] if started_operations > 0: raise ProjectServiceException( "A project can not be renamed while operations are still running!" ) if is_create: current_proj = Project(new_name, current_user.id, data["description"]) self.storage_interface.get_project_folder(current_proj.name) else: try: current_proj = dao.get_project_by_id(selected_id) except Exception as excep: self.logger.exception("An error has occurred!") raise ProjectServiceException(str(excep)) if current_proj.name != new_name: self.storage_interface.rename_project(current_proj.name, new_name) current_proj.name = new_name current_proj.description = data["description"] # Commit to make sure we have a valid ID current_proj.refresh_update_date() _, metadata_proj = current_proj.to_dict() self.storage_interface.write_project_metadata(metadata_proj) current_proj = dao.store_entity(current_proj) # Retrieve, to initialize lazy attributes current_proj = dao.get_project_by_id(current_proj.id) # Update share settings on current Project entity visited_pages = [] prj_admin = current_proj.administrator.username if 'visited_pages' in data and data['visited_pages']: visited_pages = data['visited_pages'].split(',') for page in visited_pages: members = UserService.retrieve_users_except([prj_admin], int(page), MEMBERS_PAGE_SIZE)[0] members = [m.id for m in members] dao.delete_members_for_project(current_proj.id, members) selected_user_ids = data["users"] if is_create and current_user.id not in selected_user_ids: # Make the project admin also member of the current project selected_user_ids.append(current_user.id) dao.add_members_to_project(current_proj.id, selected_user_ids) # Finish operation self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:'******'-' result["count"] = one_op[2] result["gid"] = one_op[13] operation_group_id = one_op[3] if operation_group_id is not None and operation_group_id: try: operation_group = dao.get_generic_entity( OperationGroup, operation_group_id)[0] result["group"] = operation_group.name result["group"] = result["group"].replace("_", " ") result["operation_group_id"] = operation_group.id datatype_group = dao.get_datatypegroup_by_op_group_id( operation_group_id) result[ "datatype_group_gid"] = datatype_group.gid if datatype_group is not None else None result["gid"] = operation_group.gid # Filter only viewers for current DataTypeGroup entity: if datatype_group is None: view_groups = None else: view_groups = AlgorithmService( ).get_visualizers_for_group(datatype_group.gid) result["view_groups"] = view_groups except Exception: self.logger.exception( "We will ignore group on entity:" + str(one_op)) result["datatype_group_gid"] = None else: result['group'] = None result['datatype_group_gid'] = None result["algorithm"] = dao.get_algorithm_by_id(one_op[4]) result["user"] = dao.get_user_by_id(one_op[5]) if type(one_op[6]) is str: result["create"] = string2date(str(one_op[6])) else: result["create"] = one_op[6] if type(one_op[7]) is str: result["start"] = string2date(str(one_op[7])) else: result["start"] = one_op[7] if type(one_op[8]) is str: result["complete"] = string2date(str(one_op[8])) else: result["complete"] = one_op[8] if result["complete"] is not None and result[ "start"] is not None: result["duration"] = format_timedelta(result["complete"] - result["start"]) result["status"] = one_op[9] result["additional"] = one_op[10] result["visible"] = True if one_op[11] > 0 else False result['operation_tag'] = one_op[12] if not result['group']: result['results'] = dao.get_results_for_operation( result['id']) else: result['results'] = None operations.append(result) except Exception: # We got an exception when processing one Operation Row. We will continue with the rest of the rows. self.logger.exception( "Could not prepare operation for display:" + str(one_op)) return selected_project, total_ops_nr, operations, pages_no def retrieve_projects_for_user(self, user_id, current_page=1): """ Return a list with all Projects visible for current user. """ start_idx = PROJECTS_PAGE_SIZE * (current_page - 1) total = dao.get_projects_for_user(user_id, is_count=True) available_projects = dao.get_projects_for_user(user_id, start_idx, PROJECTS_PAGE_SIZE) pages_no = total // PROJECTS_PAGE_SIZE + (1 if total % PROJECTS_PAGE_SIZE else 0) for prj in available_projects: fns, sta, err, canceled, pending = dao.get_operation_numbers( prj.id) prj.operations_finished = fns prj.operations_started = sta prj.operations_error = err prj.operations_canceled = canceled prj.operations_pending = pending prj.disk_size = dao.get_project_disk_size(prj.id) prj.disk_size_human = format_bytes_human(prj.disk_size) self.logger.debug("Displaying " + str(len(available_projects)) + " projects in UI for user " + str(user_id)) return available_projects, pages_no @staticmethod def retrieve_all_user_projects(user_id, page_start=0, page_size=PROJECTS_PAGE_SIZE): """ Return a list with all projects visible for current user, without pagination. """ return dao.get_projects_for_user(user_id, page_start=page_start, page_size=page_size) @staticmethod def get_linkable_projects_for_user(user_id, data_id): """ Find projects with are visible for current user, and in which current datatype hasn't been linked yet. """ return dao.get_linkable_projects_for_user(user_id, data_id) @transactional def remove_project(self, project_id): """ Remove Project from DB and File Storage. """ try: project2delete = dao.get_project_by_id(project_id) self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name) project_datatypes = dao.get_datatypes_in_project(project_id) project_datatypes.sort(key=lambda dt: dt.create_date, reverse=True) for one_data in project_datatypes: self.remove_datatype(project_id, one_data.gid, True) links = dao.get_links_for_project(project_id) for one_link in links: dao.remove_entity(Links, one_link.id) project_bursts = dao.get_bursts_for_project(project_id) for burst in project_bursts: dao.remove_entity(burst.__class__, burst.id) self.storage_interface.remove_project(project2delete) dao.delete_project(project_id) self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name) except RemoveDataTypeException as excep: self.logger.exception("Could not execute operation Node Remove!") raise ProjectServiceException(str(excep)) except FileStructureException as excep: self.logger.exception("Could not delete because of rights!") raise ProjectServiceException(str(excep)) except Exception as excep: self.logger.exception(str(excep)) raise ProjectServiceException(str(excep)) # ----------------- Methods for populating Data-Structure Page --------------- @staticmethod def get_datatype_in_group(group): """ Return all dataTypes that are the result of the same DTgroup. """ return dao.get_datatype_in_group(datatype_group_id=group) @staticmethod def get_datatypes_from_datatype_group(datatype_group_id): """ Retrieve all dataType which are part from the given dataType group. """ return dao.get_datatypes_from_datatype_group(datatype_group_id) @staticmethod def load_operation_by_gid(operation_gid): """ Retrieve loaded Operation from DB""" return dao.get_operation_by_gid(operation_gid) @staticmethod def load_operation_lazy_by_gid(operation_gid): """ Retrieve lazy Operation from DB""" return dao.get_operation_lazy_by_gid(operation_gid) @staticmethod def get_operation_group_by_id(operation_group_id): """ Loads OperationGroup from DB""" return dao.get_operationgroup_by_id(operation_group_id) @staticmethod def get_operation_group_by_gid(operation_group_gid): """ Loads OperationGroup from DB""" return dao.get_operationgroup_by_gid(operation_group_gid) @staticmethod def get_operations_in_group(operation_group): """ Return all the operations from an operation group. """ return dao.get_operations_in_group(operation_group.id) @staticmethod def is_upload_operation(operation_gid): """ Returns True only if the operation with the given GID is an upload operation. """ return dao.is_upload_operation(operation_gid) @staticmethod def get_all_operations_for_uploaders(project_id): """ Returns all finished upload operations. """ return dao.get_all_operations_for_uploaders(project_id) def set_operation_and_group_visibility(self, entity_gid, is_visible, is_operation_group=False): """ Sets the operation visibility. If 'is_operation_group' is True than this method will change the visibility for all the operation from the OperationGroup with the GID field equal to 'entity_gid'. """ def set_visibility(op): # workaround: # 'reload' the operation so that it has the project property set. # get_operations_in_group does not eager load it and now we're out of a sqlalchemy session # write_operation_metadata requires that property op = dao.get_operation_by_id(op.id) # end hack op.visible = is_visible dao.store_entity(op) def set_group_descendants_visibility(operation_group_id): ops_in_group = dao.get_operations_in_group(operation_group_id) for group_op in ops_in_group: set_visibility(group_op) if is_operation_group: op_group_id = dao.get_operationgroup_by_gid(entity_gid).id set_group_descendants_visibility(op_group_id) else: operation = dao.get_operation_by_gid(entity_gid) # we assure that if the operation belongs to a group than the visibility will be changed for the entire group if operation.fk_operation_group is not None: set_group_descendants_visibility(operation.fk_operation_group) else: set_visibility(operation) def get_operation_details(self, operation_gid, is_group): """ :returns: an entity OperationOverlayDetails filled with all information for current operation details. """ if is_group: operation_group = self.get_operation_group_by_gid(operation_gid) operation = dao.get_operations_in_group(operation_group.id, False, True) # Reload, to make sure all attributes lazy are populated as well. operation = dao.get_operation_by_gid(operation.gid) no_of_op_in_group = dao.get_operations_in_group(operation_group.id, is_count=True) datatype_group = self.get_datatypegroup_by_op_group_id( operation_group.id) count_result = dao.count_datatypes_in_group(datatype_group.id) else: operation = dao.get_operation_by_gid(operation_gid) if operation is None: return None no_of_op_in_group = 1 count_result = dao.count_resulted_datatypes(operation.id) user_display_name = dao.get_user_by_id( operation.fk_launched_by).display_name burst = dao.get_burst_for_operation_id(operation.id) datatypes_param, all_special_params = self._review_operation_inputs( operation.gid) op_pid = dao.get_operation_process_for_operation(operation.id) op_details = OperationOverlayDetails(operation, user_display_name, len(datatypes_param), count_result, burst, no_of_op_in_group, op_pid) # Add all parameter which are set differently by the user on this Operation. if all_special_params is not None: op_details.add_scientific_fields(all_special_params) return op_details @staticmethod def get_filterable_meta(): """ Contains all the attributes by which the user can structure the tree of DataTypes """ return DataTypeMetaData.get_filterable_meta() def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value): """ Find all DataTypes (including the linked ones and the groups) relevant for the current project. In case of a problem, will return an empty list. """ metadata_list = [] dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value) for dt in dt_list: # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects data = {} is_group = False group_op = None # Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None: is_group = True group_op = dt.parent_operation.operation_group # All these fields are necessary here for dynamic Tree levels. data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id data[DataTypeMetaData.KEY_GID] = dt.gid data[DataTypeMetaData.KEY_NODE_TYPE] = dt.display_type data[DataTypeMetaData.KEY_STATE] = dt.state data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject) data[DataTypeMetaData.KEY_TITLE] = dt.display_name data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible data[DataTypeMetaData. KEY_LINK] = dt.parent_operation.fk_launched_in != project.id data[DataTypeMetaData. KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else '' data[DataTypeMetaData. KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else '' data[DataTypeMetaData. KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else '' data[DataTypeMetaData. KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else '' data[DataTypeMetaData. KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else '' # Operation related fields: operation_name = CommonDetails.compute_operation_name( dt.parent_operation.algorithm.algorithm_category.displayname, dt.parent_operation.algorithm.displayname) data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name data[ DataTypeMetaData. KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname data[DataTypeMetaData. KEY_AUTHOR] = dt.parent_operation.user.username data[ DataTypeMetaData. KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group data[DataTypeMetaData. KEY_OP_GROUP_ID] = group_op.id if is_group else None completion_date = dt.parent_operation.completion_date string_year = completion_date.strftime( MONTH_YEAR_FORMAT) if completion_date is not None else "" string_month = completion_date.strftime( DAY_MONTH_YEAR_FORMAT) if completion_date is not None else "" data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if ( completion_date is not None) else '' data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month data[ DataTypeMetaData. KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-' metadata_list.append(DataTypeMetaData(data, dt.invalid)) return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name) @staticmethod def get_datatype_details(datatype_gid): """ :returns: an array. First entry in array is an instance of DataTypeOverlayDetails\ The second one contains all the possible states for the specified dataType. """ meta_atts = DataTypeOverlayDetails() states = DataTypeMetaData.STATES try: datatype_result = dao.get_datatype_details(datatype_gid) meta_atts.fill_from_datatype(datatype_result, datatype_result._parent_burst) return meta_atts, states, datatype_result except Exception: # We ignore exception here (it was logged above, and we want to return no details). return meta_atts, states, None def _remove_project_node_files(self, project_id, gid, skip_validation=False): """ Delegate removal of a node in the structure of the project. In case of a problem will THROW StructureException. """ try: project = self.find_project(project_id) datatype = dao.get_datatype_by_gid(gid) links = dao.get_links_for_datatype(datatype.id) op = dao.get_operation_by_id(datatype.fk_from_operation) if links: was_link = False for link in links: # This means it's only a link and we need to remove it if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id: dao.remove_entity(Links, link.id) was_link = True if not was_link: # Create a clone of the operation # There is no view_model so the view_model_gid is None new_op = Operation( op.view_model_gid, dao.get_system_user().id, links[0].fk_to_project, datatype.parent_operation.fk_from_algo, datatype.parent_operation.status, datatype.parent_operation.start_date, datatype.parent_operation.completion_date, datatype.parent_operation.fk_operation_group, datatype.parent_operation.additional_info, datatype.parent_operation.user_group, datatype.parent_operation.range_values) new_op = dao.store_entity(new_op) to_project = self.find_project(links[0].fk_to_project) to_project_path = self.storage_interface.get_project_folder( to_project.name) full_path = h5.path_for_stored_index(datatype) old_folder = self.storage_interface.get_project_folder( project.name, str(op.id)) vm_full_path = h5.determine_filepath( op.view_model_gid, old_folder) self.storage_interface.move_datatype_with_sync( to_project, to_project_path, new_op.id, full_path, vm_full_path) datatype.fk_from_operation = new_op.id datatype.parent_operation = new_op dao.store_entity(datatype) dao.remove_entity(Links, links[0].id) else: specific_remover = get_remover(datatype.type)(datatype) specific_remover.remove_datatype(skip_validation) except RemoveDataTypeException: self.logger.exception("Could not execute operation Node Remove!") raise except FileStructureException: self.logger.exception("Remove operation failed") raise StructureException( "Remove operation failed for unknown reasons.Please contact system administrator." ) def remove_operation(self, operation_id): """ Remove a given operation """ operation = dao.try_get_operation_by_id(operation_id) if operation is not None: self.logger.debug("Deleting operation %s " % operation) datatypes_for_op = dao.get_results_for_operation(operation_id) for dt in reversed(datatypes_for_op): self.remove_datatype(operation.project.id, dt.gid, False) # Here the Operation is mot probably already removed - in case DTs were found inside # but we still remove it for the case when no DTs exist dao.remove_entity(Operation, operation.id) self.storage_interface.remove_operation_data( operation.project.name, operation_id) self.storage_interface.push_folder_to_sync(operation.project.name) self.logger.debug("Finished deleting operation %s " % operation) else: self.logger.warning( "Attempt to delete operation with id=%s which no longer exists." % operation_id) def remove_datatype(self, project_id, datatype_gid, skip_validation=False): """ Method used for removing a dataType. If the given dataType is a DatatypeGroup or a dataType from a DataTypeGroup than this method will remove the entire group. The operation(s) used for creating the dataType(s) will also be removed. """ datatype = dao.get_datatype_by_gid(datatype_gid) if datatype is None: self.logger.warning( "Attempt to delete DT[%s] which no longer exists." % datatype_gid) return is_datatype_group = False datatype_group = None if dao.is_datatype_group(datatype_gid): is_datatype_group = True datatype_group = datatype elif datatype.fk_datatype_group is not None: is_datatype_group = True datatype_group = dao.get_datatype_by_id(datatype.fk_datatype_group) operations_set = [datatype.fk_from_operation] correct = True if is_datatype_group: operations_set = [datatype_group.fk_from_operation] self.logger.debug("Removing datatype group %s" % datatype_group) if datatype_group.fk_parent_burst: burst = dao.get_generic_entity(BurstConfiguration, datatype_group.fk_parent_burst, 'gid')[0] dao.remove_entity(BurstConfiguration, burst.id) if burst.fk_metric_operation_group: correct = correct and self._remove_operation_group( burst.fk_metric_operation_group, project_id, skip_validation, operations_set) if burst.fk_operation_group: correct = correct and self._remove_operation_group( burst.fk_operation_group, project_id, skip_validation, operations_set) else: self._remove_datatype_group_dts(project_id, datatype_group.id, skip_validation, operations_set) datatype_group = dao.get_datatype_group_by_gid( datatype_group.gid) dao.remove_entity(DataTypeGroup, datatype.id) correct = correct and dao.remove_entity( OperationGroup, datatype_group.fk_operation_group) else: self.logger.debug("Removing datatype %s" % datatype) self._remove_project_node_files(project_id, datatype.gid, skip_validation) # Remove Operation entity in case no other DataType needs them. project = dao.get_project_by_id(project_id) for operation_id in operations_set: dependent_dt = dao.get_generic_entity(DataType, operation_id, "fk_from_operation") if len(dependent_dt) > 0: # Do not remove Operation in case DataType still exist referring it. continue op_burst = dao.get_burst_for_operation_id(operation_id) if op_burst: correct = correct and dao.remove_entity( BurstConfiguration, op_burst.id) correct = correct and dao.remove_entity(Operation, operation_id) # Make sure Operation folder is removed self.storage_interface.remove_operation_data( project.name, operation_id) self.storage_interface.push_folder_to_sync(project.name) if not correct: raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid)) def _remove_operation_group(self, operation_group_id, project_id, skip_validation, operations_set): metrics_groups = dao.get_generic_entity(DataTypeGroup, operation_group_id, 'fk_operation_group') if len(metrics_groups) > 0: metric_datatype_group_id = metrics_groups[0].id self._remove_datatype_group_dts(project_id, metric_datatype_group_id, skip_validation, operations_set) dao.remove_entity(DataTypeGroup, metric_datatype_group_id) return dao.remove_entity(OperationGroup, operation_group_id) def _remove_datatype_group_dts(self, project_id, dt_group_id, skip_validation, operations_set): data_list = dao.get_datatypes_from_datatype_group(dt_group_id) for adata in data_list: self._remove_project_node_files(project_id, adata.gid, skip_validation) if adata.fk_from_operation not in operations_set: operations_set.append(adata.fk_from_operation) def update_metadata(self, submit_data): """ Update DataType/ DataTypeGroup metadata THROW StructureException when input data is invalid. """ new_data = dict() for key in DataTypeOverlayDetails().meta_attributes_list: if key in submit_data: value = submit_data[key] if value == "None": value = None if value == "" and key in [ CommonDetails.CODE_OPERATION_TAG, CommonDetails.CODE_OPERATION_GROUP_ID ]: value = None new_data[key] = value try: if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]): # We need to edit a group all_data_in_group = dao.get_datatype_in_group( operation_group_id=new_data[ CommonDetails.CODE_OPERATION_GROUP_ID]) if len(all_data_in_group) < 1: raise StructureException( "Inconsistent group, can not be updated!") # datatype_group = dao.get_generic_entity(DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0] # all_data_in_group.append(datatype_group) for datatype in all_data_in_group: self._edit_data(datatype, new_data, True) else: # Get the required DataType and operation from DB to store changes that will be done in XML. gid = new_data[CommonDetails.CODE_GID] datatype = dao.get_datatype_by_gid(gid) self._edit_data(datatype, new_data) except Exception as excep: self.logger.exception(excep) raise StructureException(str(excep)) def _edit_data(self, datatype, new_data, from_group=False): # type: (DataType, dict, bool) -> None """ Private method, used for editing a meta-data XML file and a DataType row for a given custom DataType entity with new dictionary of data from UI. """ # 1. First update Operation fields: # Update group field if possible new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG] empty_group_value = (new_group_name is None or new_group_name == "") if from_group: if empty_group_value: raise StructureException("Empty group is not allowed!") group = dao.get_generic_entity( OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID]) if group and len(group) > 0 and new_group_name != group[0].name: group = group[0] exists_group = dao.get_generic_entity(OperationGroup, new_group_name, 'name') if exists_group: raise StructureException("Group '" + new_group_name + "' already exists.") group.name = new_group_name dao.store_entity(group) else: operation = dao.get_operation_by_id(datatype.fk_from_operation) operation.user_group = new_group_name dao.store_entity(operation) op_folder = self.storage_interface.get_project_folder( operation.project.name, str(operation.id)) vm_gid = operation.view_model_gid view_model_file = h5.determine_filepath(vm_gid, op_folder) if view_model_file: view_model_class = H5File.determine_type(view_model_file) view_model = view_model_class() with ViewModelH5(view_model_file, view_model) as f: ga = f.load_generic_attributes() ga.operation_tag = new_group_name f.store_generic_attributes(ga, False) else: self.logger.warning( "Could not find ViewModel H5 file for op: {}".format( operation)) # 2. Update GenericAttributes in the associated H5 files: h5_path = h5.path_for_stored_index(datatype) with H5File.from_file(h5_path) as f: ga = f.load_generic_attributes() ga.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT] ga.state = new_data[DataTypeOverlayDetails.DATA_STATE] ga.operation_tag = new_group_name if DataTypeOverlayDetails.DATA_TAG_1 in new_data: ga.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1] if DataTypeOverlayDetails.DATA_TAG_2 in new_data: ga.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2] if DataTypeOverlayDetails.DATA_TAG_3 in new_data: ga.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3] if DataTypeOverlayDetails.DATA_TAG_4 in new_data: ga.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4] if DataTypeOverlayDetails.DATA_TAG_5 in new_data: ga.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5] f.store_generic_attributes(ga, False) # 3. Update MetaData in DT Index DB as well. datatype.fill_from_generic_attributes(ga) dao.store_entity(datatype) def _review_operation_inputs(self, operation_gid): """ :returns: A list of DataTypes that are used as input parameters for the specified operation. And a dictionary will all operation parameters different then the default ones. """ operation = dao.get_operation_by_gid(operation_gid) try: adapter = ABCAdapter.build_adapter(operation.algorithm) return review_operation_inputs_from_adapter(adapter, operation) except Exception: self.logger.exception("Could not load details for operation %s" % operation_gid) if operation.view_model_gid: changed_parameters = dict( Warning= "Algorithm changed dramatically. We can not offer more details" ) else: changed_parameters = dict( Warning= "GID parameter is missing. Old implementation of the operation." ) return [], changed_parameters @staticmethod def get_results_for_operation(operation_id, selected_filter=None): """ Retrieve the DataTypes entities resulted after the execution of the given operation. """ return dao.get_results_for_operation(operation_id, selected_filter) @staticmethod def get_datatype_by_id(datatype_id): """Retrieve a DataType DB reference by its id.""" return dao.get_datatype_by_id(datatype_id) @staticmethod def get_datatypegroup_by_gid(datatypegroup_gid): """ Returns the DataTypeGroup with the specified gid. """ return dao.get_datatype_group_by_gid(datatypegroup_gid) @staticmethod def get_datatypegroup_by_op_group_id(operation_group_id): """ Returns the DataTypeGroup with the specified id. """ return dao.get_datatypegroup_by_op_group_id(operation_group_id) @staticmethod def get_datatypes_in_project(project_id, only_visible=False): return dao.get_data_in_project(project_id, only_visible) @staticmethod def set_datatype_visibility(datatype_gid, is_visible): """ Sets the dataType visibility. If the given dataType is a dataType group or it is part of a dataType group than this method will set the visibility for each dataType from this group. """ def set_visibility(dt): """ set visibility flag, persist in db and h5""" dt.visible = is_visible dt = dao.store_entity(dt) h5_path = h5.path_for_stored_index(dt) with H5File.from_file(h5_path) as f: f.visible.store(is_visible) def set_group_descendants_visibility(datatype_group_id): datatypes_in_group = dao.get_datatypes_from_datatype_group( datatype_group_id) for group_dt in datatypes_in_group: set_visibility(group_dt) datatype = dao.get_datatype_by_gid(datatype_gid) if isinstance(datatype, DataTypeGroup): # datatype is a group set_group_descendants_visibility(datatype.id) datatype.visible = is_visible dao.store_entity(datatype) elif datatype.fk_datatype_group is not None: # datatype is member of a group set_group_descendants_visibility(datatype.fk_datatype_group) # the datatype to be updated is the parent datatype group parent = dao.get_datatype_by_id(datatype.fk_datatype_group) parent.visible = is_visible dao.store_entity(parent) else: # update the single datatype. set_visibility(datatype) @staticmethod def is_datatype_group(datatype_gid): """ Used to check if the dataType with the specified GID is a DataTypeGroup. """ return dao.is_datatype_group(datatype_gid) def get_linked_datatypes_storage_path(self, project): """ :return: the file paths to the datatypes that are linked in `project` """ paths = [] for lnk_dt in dao.get_linked_datatypes_in_project(project.id): # get datatype as a mapped type path = h5.path_for_stored_index(lnk_dt) if path is not None: paths.append(path) else: self.logger.warning( "Problem when trying to retrieve path on %s:%s!" % (lnk_dt.type, lnk_dt.gid)) return paths
class ImportService(object): """ Service for importing TVB entities into system. It supports TVB exported H5 files as input, but it should also handle H5 files generated outside of TVB, as long as they respect the same structure. """ def __init__(self): self.logger = get_logger(__name__) self.user_id = None self.storage_interface = StorageInterface() self.created_projects = [] self.view_model2adapter = self._populate_view_model2adapter() def _download_and_unpack_project_zip(self, uploaded, uq_file_name, temp_folder): if isinstance(uploaded, (FieldStorage, Part)): if not uploaded.file: raise ImportException( "Please select the archive which contains the project structure." ) with open(uq_file_name, 'wb') as file_obj: self.storage_interface.copy_file(uploaded.file, file_obj) else: shutil.copy2(uploaded, uq_file_name) try: self.storage_interface.unpack_zip(uq_file_name, temp_folder) except FileStructureException as excep: self.logger.exception(excep) raise ImportException( "Bad ZIP archive provided. A TVB exported project is expected!" ) @staticmethod def _compute_unpack_path(): """ :return: the name of the folder where to expand uploaded zip """ now = datetime.now() date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day, now.hour, now.minute, now.second, now.microsecond) uq_name = "%s-ImportProject" % date_str return os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, uq_name) @transactional def import_project_structure(self, uploaded, user_id): """ Execute import operations: 1. check if ZIP or folder 2. find all project nodes 3. for each project node: - create project - create all operations and groups - import all images - create all dataTypes """ self.user_id = user_id self.created_projects = [] # Now compute the name of the folder where to explode uploaded ZIP file temp_folder = self._compute_unpack_path() uq_file_name = temp_folder + ".zip" try: self._download_and_unpack_project_zip(uploaded, uq_file_name, temp_folder) self._import_project_from_folder(temp_folder) except Exception as excep: self.logger.exception( "Error encountered during import. Deleting projects created during this operation." ) # Remove project folders created so far. # Note that using the project service to remove the projects will not work, # because we do not have support for nested transaction. # Removing from DB is not necessary because in transactional env a simple exception throw # will erase everything to be inserted. for project in self.created_projects: self.storage_interface.remove_project(project) raise ImportException(str(excep)) finally: # Now delete uploaded file and temporary folder where uploaded ZIP was exploded. self.storage_interface.remove_files([uq_file_name, temp_folder]) def _import_project_from_folder(self, temp_folder): """ Process each project from the uploaded pack, to extract names. """ temp_project_path = None for root, _, files in os.walk(temp_folder): if StorageInterface.TVB_PROJECT_FILE in files: temp_project_path = root break if temp_project_path is not None: update_manager = ProjectUpdateManager(temp_project_path) if update_manager.checked_version < 3: raise ImportException( 'Importing projects with versions older than 3 is not supported in TVB 2! ' 'Please import the project in TVB 1.5.8 and then launch the current version of ' 'TVB in order to upgrade this project!') update_manager.run_all_updates() project = self.__populate_project(temp_project_path) # Populate the internal list of create projects so far, for cleaning up folders, in case of failure self.created_projects.append(project) # Ensure project final folder exists on disk project_path = self.storage_interface.get_project_folder( project.name) shutil.move( os.path.join(temp_project_path, StorageInterface.TVB_PROJECT_FILE), project_path) # Now import project operations with their results self.import_list_of_operations(project, temp_project_path) # Import images and move them from temp into target self._store_imported_images(project, temp_project_path, project.name) if StorageInterface.encryption_enabled(): self.storage_interface.remove_project(project, True) def _load_datatypes_from_operation_folder(self, src_op_path, operation_entity, datatype_group): """ Loads datatypes from operation folder :returns: Datatype entities as dict {original_path: Dt instance} """ all_datatypes = {} for file_name in os.listdir(src_op_path): if self.storage_interface.ends_with_tvb_storage_file_extension( file_name): h5_file = os.path.join(src_op_path, file_name) try: file_update_manager = FilesUpdateManager() file_update_manager.upgrade_file(h5_file) datatype = self.load_datatype_from_file( h5_file, operation_entity.id, datatype_group, operation_entity.fk_launched_in) all_datatypes[h5_file] = datatype except IncompatibleFileManagerException: os.remove(h5_file) self.logger.warning( "Incompatible H5 file will be ignored: %s" % h5_file) self.logger.exception("Incompatibility details ...") return all_datatypes @staticmethod def check_import_references(file_path, datatype): h5_class = H5File.h5_class_from_file(file_path) reference_list = h5_class(file_path).gather_references() for _, reference_gid in reference_list: if not reference_gid: continue ref_index = load.load_entity_by_gid(reference_gid) if ref_index is None: os.remove(file_path) dao.remove_entity(datatype.__class__, datatype.id) raise MissingReferenceException( 'Imported file depends on datatypes that do not exist. Please upload ' 'those first!') def _store_or_link_burst_config(self, burst_config, bc_path, project_id): bc_already_in_tvb = dao.get_generic_entity(BurstConfiguration, burst_config.gid, 'gid') if len(bc_already_in_tvb) == 0: self.store_datatype(burst_config, bc_path) return 1 return 0 def store_or_link_datatype(self, datatype, dt_path, project_id): self.check_import_references(dt_path, datatype) stored_dt_count = 0 datatype_already_in_tvb = load.load_entity_by_gid(datatype.gid) if not datatype_already_in_tvb: self.store_datatype(datatype, dt_path) stored_dt_count = 1 elif datatype_already_in_tvb.parent_operation.project.id != project_id: AlgorithmService.create_link(datatype_already_in_tvb.id, project_id) if datatype_already_in_tvb.fk_datatype_group: AlgorithmService.create_link( datatype_already_in_tvb.fk_datatype_group, project_id) return stored_dt_count def _store_imported_datatypes_in_db(self, project, all_datatypes): # type: (Project, dict) -> int sorted_dts = sorted( all_datatypes.items(), key=lambda dt_item: dt_item[1].create_date or datetime.now()) count = 0 for dt_path, datatype in sorted_dts: count += self.store_or_link_datatype(datatype, dt_path, project.id) return count def _store_imported_images(self, project, temp_project_path, project_name): """ Import all images from project """ images_root = os.path.join(temp_project_path, StorageInterface.IMAGES_FOLDER) target_images_path = self.storage_interface.get_images_folder( project_name) for root, _, files in os.walk(images_root): for metadata_file in files: if self.storage_interface.ends_with_tvb_file_extension( metadata_file): self._import_image(root, metadata_file, project.id, target_images_path) @staticmethod def _populate_view_model2adapter(): if len(VIEW_MODEL2ADAPTER) > 0: return VIEW_MODEL2ADAPTER view_model2adapter = {} algos = dao.get_all_algorithms() for algo in algos: adapter = ABCAdapter.build_adapter(algo) view_model_class = adapter.get_view_model_class() view_model2adapter[view_model_class] = algo return view_model2adapter def _retrieve_operations_in_order(self, project, import_path, importer_operation_id=None): # type: (Project, str, int) -> list[Operation2ImportData] retrieved_operations = [] for root, _, files in os.walk(import_path): if OPERATION_XML in files: # Previous Operation format for uploading previous versions of projects operation_file_path = os.path.join(root, OPERATION_XML) operation, operation_xml_parameters, _ = self.build_operation_from_file( project, operation_file_path) operation.import_file = operation_file_path self.logger.debug("Found operation in old XML format: " + str(operation)) retrieved_operations.append( Operation2ImportData( operation, root, info_from_xml=operation_xml_parameters)) else: # We strive for the new format with ViewModelH5 main_view_model = None dt_paths = [] all_view_model_files = [] for file in files: if self.storage_interface.ends_with_tvb_storage_file_extension( file): h5_file = os.path.join(root, file) try: h5_class = H5File.h5_class_from_file(h5_file) if h5_class is ViewModelH5: all_view_model_files.append(h5_file) if not main_view_model: view_model = h5.load_view_model_from_file( h5_file) if type( view_model ) in self.view_model2adapter.keys(): main_view_model = view_model else: file_update_manager = FilesUpdateManager() file_update_manager.upgrade_file(h5_file) dt_paths.append(h5_file) except Exception: self.logger.warning( "Unreadable H5 file will be ignored: %s" % h5_file) if main_view_model is not None: alg = self.view_model2adapter[type(main_view_model)] op_group_id = None if main_view_model.operation_group_gid: op_group = dao.get_operationgroup_by_gid( main_view_model.operation_group_gid.hex) if not op_group: op_group = OperationGroup( project.id, ranges=json.loads(main_view_model.ranges), gid=main_view_model.operation_group_gid.hex) op_group = dao.store_entity(op_group) op_group_id = op_group.id operation = Operation( main_view_model.gid.hex, project.fk_admin, project.id, alg.id, status=STATUS_FINISHED, user_group=main_view_model.generic_attributes. operation_tag, start_date=datetime.now(), completion_date=datetime.now(), op_group_id=op_group_id, range_values=main_view_model.range_values) operation.create_date = main_view_model.create_date operation.visible = main_view_model.generic_attributes.visible self.logger.debug( "Found main ViewModel to create operation for it: " + str(operation)) retrieved_operations.append( Operation2ImportData(operation, root, main_view_model, dt_paths, all_view_model_files)) elif len(dt_paths) > 0: alg = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS) default_adapter = ABCAdapter.build_adapter(alg) view_model = default_adapter.get_view_model_class()() view_model.data_file = dt_paths[0] vm_path = h5.store_view_model(view_model, root) all_view_model_files.append(vm_path) operation = Operation(view_model.gid.hex, project.fk_admin, project.id, alg.id, status=STATUS_FINISHED, start_date=datetime.now(), completion_date=datetime.now()) operation.create_date = datetime.min self.logger.debug( "Found no ViewModel in folder, so we default to " + str(operation)) if importer_operation_id: operation.id = importer_operation_id retrieved_operations.append( Operation2ImportData(operation, root, view_model, dt_paths, all_view_model_files, True)) return sorted(retrieved_operations, key=lambda op_data: op_data.order_field) def create_view_model(self, operation_entity, operation_data, new_op_folder, generic_attributes=None, add_params=None): view_model = self._get_new_form_view_model( operation_entity, operation_data.info_from_xml) if add_params is not None: for element in add_params: key_attr = getattr(view_model, element[0]) setattr(key_attr, element[1], element[2]) view_model.range_values = operation_entity.range_values op_group = dao.get_operationgroup_by_id( operation_entity.fk_operation_group) if op_group: view_model.operation_group_gid = uuid.UUID(op_group.gid) view_model.ranges = json.dumps(op_group.range_references) view_model.is_metric_operation = 'DatatypeMeasure' in op_group.name if generic_attributes is not None: view_model.generic_attributes = generic_attributes view_model.generic_attributes.operation_tag = operation_entity.user_group h5.store_view_model(view_model, new_op_folder) view_model_disk_size = StorageInterface.compute_recursive_h5_disk_usage( new_op_folder) operation_entity.view_model_disk_size = view_model_disk_size operation_entity.view_model_gid = view_model.gid.hex dao.store_entity(operation_entity) return view_model def import_list_of_operations(self, project, import_path, is_group=False, importer_operation_id=None): """ This method scans provided folder and identify all operations that needs to be imported """ all_dts_count = 0 all_stored_dts_count = 0 imported_operations = [] ordered_operations = self._retrieve_operations_in_order( project, import_path, None if is_group else importer_operation_id) if is_group and len(ordered_operations) > 0: first_op = dao.get_operation_by_id(importer_operation_id) vm_path = h5.determine_filepath(first_op.view_model_gid, os.path.dirname(import_path)) os.remove(vm_path) ordered_operations[0].operation.id = importer_operation_id for operation_data in ordered_operations: if operation_data.is_old_form: operation_entity, datatype_group = self.import_operation( operation_data.operation) new_op_folder = self.storage_interface.get_project_folder( project.name, str(operation_entity.id)) try: operation_datatypes = self._load_datatypes_from_operation_folder( operation_data.operation_folder, operation_entity, datatype_group) # Create and store view_model from operation self.create_view_model(operation_entity, operation_data, new_op_folder) self._store_imported_datatypes_in_db( project, operation_datatypes) imported_operations.append(operation_entity) except MissingReferenceException: operation_entity.status = STATUS_ERROR dao.store_entity(operation_entity) elif operation_data.main_view_model is not None: operation_data.operation.create_date = datetime.now() operation_data.operation.start_date = datetime.now() operation_data.operation.completion_date = datetime.now() do_merge = False if importer_operation_id: do_merge = True operation_entity = dao.store_entity(operation_data.operation, merge=do_merge) dt_group = None op_group = dao.get_operationgroup_by_id( operation_entity.fk_operation_group) if op_group: dt_group = dao.get_datatypegroup_by_op_group_id( op_group.id) if not dt_group: first_op = dao.get_operations_in_group( op_group.id, only_first_operation=True) dt_group = DataTypeGroup( op_group, operation_id=first_op.id, state=DEFAULTDATASTATE_INTERMEDIATE) dt_group = dao.store_entity(dt_group) # Store the DataTypes in db dts = {} all_dts_count += len(operation_data.dt_paths) for dt_path in operation_data.dt_paths: dt = self.load_datatype_from_file(dt_path, operation_entity.id, dt_group, project.id) if isinstance(dt, BurstConfiguration): if op_group: dt.fk_operation_group = op_group.id all_stored_dts_count += self._store_or_link_burst_config( dt, dt_path, project.id) else: dts[dt_path] = dt if op_group: op_group.fill_operationgroup_name(dt.type) dao.store_entity(op_group) try: stored_dts_count = self._store_imported_datatypes_in_db( project, dts) all_stored_dts_count += stored_dts_count if operation_data.main_view_model.is_metric_operation: self._update_burst_metric(operation_entity) imported_operations.append(operation_entity) new_op_folder = self.storage_interface.get_project_folder( project.name, str(operation_entity.id)) view_model_disk_size = 0 for h5_file in operation_data.all_view_model_files: view_model_disk_size += StorageInterface.compute_size_on_disk( h5_file) shutil.move(h5_file, new_op_folder) operation_entity.view_model_disk_size = view_model_disk_size dao.store_entity(operation_entity) except MissingReferenceException as excep: self.storage_interface.remove_operation_data( project.name, operation_entity.id) operation_entity.fk_operation_group = None dao.store_entity(operation_entity) dao.remove_entity(DataTypeGroup, dt_group.id) raise excep else: self.logger.warning( "Folder %s will be ignored, as we could not find a serialized " "operation or DTs inside!" % operation_data.operation_folder) # We want importer_operation_id to be kept just for the first operation (the first iteration) if is_group: importer_operation_id = None self._update_dt_groups(project.id) self._update_burst_configurations(project.id) return imported_operations, all_dts_count, all_stored_dts_count @staticmethod def _get_new_form_view_model(operation, xml_parameters): # type (Operation) -> ViewModel algo = dao.get_algorithm_by_id(operation.fk_from_algo) ad = ABCAdapter.build_adapter(algo) view_model = ad.get_view_model_class()() if xml_parameters: declarative_attrs = type(view_model).declarative_attrs if isinstance(xml_parameters, str): xml_parameters = json.loads(xml_parameters) for param in xml_parameters: new_param_name = param if param != '' and param[0] == "_": new_param_name = param[1:] new_param_name = new_param_name.lower() if new_param_name in declarative_attrs: try: setattr(view_model, new_param_name, xml_parameters[param]) except (TraitTypeError, TraitAttributeError): pass return view_model def _import_image(self, src_folder, metadata_file, project_id, target_images_path): """ Create and store a image entity. """ figure_dict = StorageInterface().read_metadata_from_xml( os.path.join(src_folder, metadata_file)) actual_figure = os.path.join( src_folder, os.path.split(figure_dict['file_path'])[1]) if not os.path.exists(actual_figure): self.logger.warning("Expected to find image path %s .Skipping" % actual_figure) return figure_dict['fk_user_id'] = self.user_id figure_dict['fk_project_id'] = project_id figure_entity = manager_of_class(ResultFigure).new_instance() figure_entity = figure_entity.from_dict(figure_dict) stored_entity = dao.store_entity(figure_entity) # Update image meta-data with the new details after import figure = dao.load_figure(stored_entity.id) shutil.move(actual_figure, target_images_path) self.logger.debug("Store imported figure") _, meta_data = figure.to_dict() self.storage_interface.write_image_metadata(figure, meta_data) def load_datatype_from_file(self, current_file, op_id, datatype_group=None, current_project_id=None): # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex """ Creates an instance of datatype from storage / H5 file :returns: DatatypeIndex """ self.logger.debug("Loading DataType from file: %s" % current_file) h5_class = H5File.h5_class_from_file(current_file) if h5_class is BurstConfigurationH5: if current_project_id is None: op_entity = dao.get_operationgroup_by_id(op_id) current_project_id = op_entity.fk_launched_in h5_file = BurstConfigurationH5(current_file) burst = BurstConfiguration(current_project_id) burst.fk_simulation = op_id h5_file.load_into(burst) result = burst else: datatype, generic_attributes = h5.load_with_links(current_file) already_existing_datatype = h5.load_entity_by_gid(datatype.gid) if datatype_group is not None and already_existing_datatype is not None: raise DatatypeGroupImportException( "The datatype group that you are trying to import" " already exists!") index_class = h5.REGISTRY.get_index_for_datatype( datatype.__class__) datatype_index = index_class() datatype_index.fill_from_has_traits(datatype) datatype_index.fill_from_generic_attributes(generic_attributes) if datatype_group is not None and hasattr(datatype_index, 'fk_source_gid') and \ datatype_index.fk_source_gid is not None: ts = h5.load_entity_by_gid(datatype_index.fk_source_gid) if ts is None: op = dao.get_operations_in_group( datatype_group.fk_operation_group, only_first_operation=True) op.fk_operation_group = None dao.store_entity(op) dao.remove_entity(OperationGroup, datatype_group.fk_operation_group) dao.remove_entity(DataTypeGroup, datatype_group.id) raise DatatypeGroupImportException( "Please import the time series group before importing the" " datatype measure group!") # Add all the required attributes if datatype_group: datatype_index.fk_datatype_group = datatype_group.id if len(datatype_group.subject) == 0: datatype_group.subject = datatype_index.subject dao.store_entity(datatype_group) datatype_index.fk_from_operation = op_id associated_file = h5.path_for_stored_index(datatype_index) if os.path.exists(associated_file): datatype_index.disk_size = StorageInterface.compute_size_on_disk( associated_file) result = datatype_index return result def store_datatype(self, datatype, current_file=None): """This method stores data type into DB""" try: self.logger.debug("Store datatype: %s with Gid: %s" % (datatype.__class__.__name__, datatype.gid)) # Now move storage file into correct folder if necessary if current_file is not None: final_path = h5.path_for_stored_index(datatype) if final_path != current_file: shutil.move(current_file, final_path) stored_entry = load.load_entity_by_gid(datatype.gid) if not stored_entry: stored_entry = dao.store_entity(datatype) return stored_entry except MissingDataSetException as e: self.logger.exception(e) error_msg = "Datatype %s has missing data and could not be imported properly." % ( datatype, ) raise ImportException(error_msg) except IntegrityError as excep: self.logger.exception(excep) error_msg = "Could not import data with gid: %s. There is already a one with " \ "the same name or gid." % datatype.gid raise ImportException(error_msg) def __populate_project(self, project_path): """ Create and store a Project entity. """ self.logger.debug("Creating project from path: %s" % project_path) project_dict = self.storage_interface.read_project_metadata( project_path) project_entity = manager_of_class(Project).new_instance() project_entity = project_entity.from_dict(project_dict, self.user_id) try: self.logger.debug("Storing imported project") return dao.store_entity(project_entity) except IntegrityError as excep: self.logger.exception(excep) error_msg = ( "Could not import project: %s with gid: %s. There is already a " "project with the same name or gid.") % (project_entity.name, project_entity.gid) raise ImportException(error_msg) def build_operation_from_file(self, project, operation_file): """ Create Operation entity from metadata file. """ operation_dict = StorageInterface().read_metadata_from_xml( operation_file) operation_entity = manager_of_class(Operation).new_instance() return operation_entity.from_dict(operation_dict, dao, self.user_id, project.gid) @staticmethod def import_operation(operation_entity, migration=False): """ Store a Operation entity. """ do_merge = False if operation_entity.id: do_merge = True operation_entity = dao.store_entity(operation_entity, merge=do_merge) operation_group_id = operation_entity.fk_operation_group datatype_group = None if operation_group_id is not None: datatype_group = dao.get_datatypegroup_by_op_group_id( operation_group_id) if datatype_group is None and migration is False: # If no dataType group present for current op. group, create it. operation_group = dao.get_operationgroup_by_id( operation_group_id) datatype_group = DataTypeGroup( operation_group, operation_id=operation_entity.id) datatype_group.state = UploadAlgorithmCategoryConfig.defaultdatastate datatype_group = dao.store_entity(datatype_group) return operation_entity, datatype_group def import_simulator_configuration_zip(self, zip_file): # Now compute the name of the folder where to explode uploaded ZIP file temp_folder = self._compute_unpack_path() uq_file_name = temp_folder + ".zip" if isinstance(zip_file, (FieldStorage, Part)): if not zip_file.file: raise ServicesBaseException( "Could not process the given ZIP file...") with open(uq_file_name, 'wb') as file_obj: self.storage_interface.copy_file(zip_file.file, file_obj) else: shutil.copy2(zip_file, uq_file_name) try: self.storage_interface.unpack_zip(uq_file_name, temp_folder) return temp_folder except FileStructureException as excep: raise ServicesBaseException( "Could not process the given ZIP file..." + str(excep)) @staticmethod def _update_burst_metric(operation_entity): burst_config = dao.get_burst_for_operation_id(operation_entity.id) if burst_config and burst_config.ranges: if burst_config.fk_metric_operation_group is None: burst_config.fk_metric_operation_group = operation_entity.fk_operation_group dao.store_entity(burst_config) @staticmethod def _update_dt_groups(project_id): dt_groups = dao.get_datatypegroup_for_project(project_id) for dt_group in dt_groups: dt_group.count_results = dao.count_datatypes_in_group(dt_group.id) dts_in_group = dao.get_datatypes_from_datatype_group(dt_group.id) if dts_in_group: dt_group.fk_parent_burst = dts_in_group[0].fk_parent_burst dao.store_entity(dt_group) @staticmethod def _update_burst_configurations(project_id): burst_configs = dao.get_bursts_for_project(project_id) for burst_config in burst_configs: burst_config.datatypes_number = dao.count_datatypes_in_burst( burst_config.gid) dao.store_entity(burst_config)