def test_fourier_spectrum_model_to_h5(tmph5factory, time_series_index_factory):
    """
    Store a FourierSpectrumModel into an H5 file, load it back into a fresh
    instance and check that the referenced input_data gid and the model's own
    gid round-trip unchanged.
    """
    fs = FourierSpectrum()
    fsm = FourierSpectrumModel(input_data=fs.gid)
    path = tmph5factory()

    h5_file = ViewModelH5(path, fsm)
    h5_file.store(fsm)
    h5_file.close()

    loaded_dt = FourierSpectrumModel()
    h5_file = ViewModelH5(path, loaded_dt)
    h5_file.load_into(loaded_dt)
    # Fix: close the reader handle as well — it was previously leaked
    h5_file.close()

    assert loaded_dt.input_data == fs.gid
    assert loaded_dt.gid == fsm.gid
def gather_all_references_of_view_model(gid, base_dir, ref_files):
    """
    Recursively collect into ref_files the paths of all H5 files referenced
    (directly or transitively) by the view model with the given gid.

    :param gid: gid of the root view model to start from
    :param base_dir: directory where the view model H5 files live
    :param ref_files: output list, mutated in place with discovered file paths
    """
    vm_path = determine_filepath(gid, base_dir)
    ref_files.append(vm_path)
    view_model_class = H5File.determine_type(vm_path)
    view_model = view_model_class()
    with ViewModelH5(vm_path, view_model) as vm_h5:
        references = vm_h5.gather_references()
        uuids = vm_h5.gather_references_by_uuid()
        # Fix: do not shadow the `gid` parameter with the loop variable
        for _, ref_gid in references:
            if not ref_gid:
                continue
            if isinstance(ref_gid, (list, tuple)):
                for list_gid in ref_gid:
                    gather_all_references_of_view_model(list_gid, base_dir, ref_files)
            else:
                gather_all_references_of_view_model(ref_gid, base_dir, ref_files)
        uuid_files = []
        for _, dt_gid in uuids:
            if not dt_gid:
                continue
            # Resolve the datatype index and collect its H5 plus sub-references
            index = load_entity_by_gid(dt_gid.hex)
            h5_file = h5_file_for_index(index)
            uuid_files.append(h5_file.path)
            gather_all_references_by_index(h5_file, uuid_files)
        ref_files.extend(uuid_files)
def load_view_model_from_file(filepath):
    # type: (str) -> ViewModel
    """
    Load a ViewModel object by reading the H5 file specified by filepath.
    References to other view models are resolved recursively, relative to the
    directory containing filepath.
    """
    base_dir = os.path.dirname(filepath)
    view_model_class = H5File.determine_type(filepath)
    view_model = view_model_class()
    with ViewModelH5(filepath, view_model) as h5_file:
        h5_file.load_into(view_model)
        references = h5_file.gather_references()
        view_model.create_date = string2date(h5_file.create_date.load())
        view_model.generic_attributes = h5_file.load_generic_attributes()
        for trait_attr, gid in references:
            if not gid:
                continue
            if isinstance(gid, list):
                # Fix: iterate directly — enumerate index was unused
                loaded_ref = [load_view_model(sub_gid, base_dir) for sub_gid in gid]
            else:
                loaded_ref = load_view_model(gid, base_dir)
            setattr(view_model, trait_attr.field_name, loaded_ref)
    return view_model
def _store_view_model(operation, project, view_model):
    """
    Serialize view_model into an H5 file placed inside the operation's folder
    of the given project.
    """
    storage_path = FilesHelper().get_project_folder(project, str(operation.id))
    h5_path = h5.path_for(storage_path, ViewModelH5, view_model.gid)
    # Fix: context manager guarantees the file is closed even if store() raises
    with ViewModelH5(h5_path, view_model) as h5_file:
        h5_file.store(view_model)
def load(self, gid=None, fname=None):
    # type: (typing.Union[uuid.UUID, str], str) -> ViewModel
    """
    Load a ViewModel object by reading the H5 file with the given GID, from the
    directory self.base_dir. Exactly one of gid / fname should be provided.

    :raises ValueError: when neither gid nor fname is given
    """
    if fname is None:
        if gid is None:
            raise ValueError("Neither gid nor filename is provided to load!")
        fname = self.find_file_by_gid(gid)
    else:
        fname = os.path.join(self.base_dir, fname)

    view_model_class = H5File.determine_type(fname)
    view_model = view_model_class()
    with ViewModelH5(fname, view_model) as h5_file:
        h5_file.load_into(view_model)
        references = h5_file.gather_references()
        view_model.create_date = string2date(h5_file.create_date.load())
        view_model.generic_attributes = h5_file.load_generic_attributes()
        # Resolve referenced view models recursively (lists of refs supported)
        for trait_attr, gid in references:
            if not gid:
                continue
            if isinstance(gid, list):
                # Fix: iterate directly — enumerate index was unused
                loaded_ref = [self.load(sub_gid) for sub_gid in gid]
            else:
                loaded_ref = self.load(gid)
            setattr(view_model, trait_attr.field_name, loaded_ref)
    return view_model
def store(self, view_model, fname=None):
    # type: (ViewModel, str) -> str
    """
    Completely store any ViewModel object to the directory specified by
    self.base_dir. Works recursively for view models that are serialized
    in multiple files (eg. SimulatorAdapterModel)

    :return: path of the H5 file written for view_model itself
    """
    if fname is None:
        h5_path = self.path_for_has_traits(type(view_model), view_model.gid)
    else:
        h5_path = os.path.join(self.base_dir, fname)

    with ViewModelH5(h5_path, view_model) as h5_file:
        h5_file.store(view_model)
        h5_file.type.store(self.get_class_path(view_model))
        h5_file.create_date.store(date2string(datetime.now()))
        if hasattr(view_model, "generic_attributes"):
            h5_file.store_generic_attributes(view_model.generic_attributes)
        else:
            # For HasTraits not inheriting from ViewModel (e.g. Linear)
            h5_file.store_generic_attributes(GenericAttributes())

        references = h5_file.gather_references()
        for trait_attr, gid in references:
            if not gid:
                continue
            model_attr = getattr(view_model, trait_attr.field_name)
            if isinstance(gid, list):
                # Fix: iterate the attribute list directly — the enumerate
                # index was only used to re-index model_attr, sub_gid unused
                for sub_model in model_attr:
                    self.store(sub_model)
            else:
                self.store(model_attr)
    return h5_path
def test_server_launch_operation(self, mocker, time_series_index_factory):
    """
    Launch a Fourier analyzer through the REST resource, with the request
    mocked to carry the serialized view model, and check a non-empty
    operation gid string is returned.
    """
    algorithm_module = "tvb.adapters.analyzers.fourier_adapter"
    algorithm_class = "FourierAdapter"
    input_ts_index = time_series_index_factory()

    fft_model = FFTAdapterModel()
    fft_model.time_series = UUID(input_ts_index.gid)
    fft_model.window_function = list(SUPPORTED_WINDOWING_FUNCTIONS)[0]

    input_folder = self.files_helper.get_project_folder(self.test_project)
    view_model_h5_path = h5.path_for(input_folder, ViewModelH5, fft_model.gid)

    view_model_h5 = ViewModelH5(view_model_h5_path, fft_model)
    view_model_h5.store(fft_model)
    view_model_h5.close()

    # Mock flask.request.files to return a dictionary
    request_mock = mocker.patch.object(flask, 'request')
    # Fix: the with-block guarantees fp is closed even if the POST or an
    # assertion raises (previously the handle leaked on failure)
    with open(view_model_h5_path, 'rb') as fp:
        request_mock.files = {
            'file': FileStorage(fp, os.path.basename(view_model_h5_path))
        }
        # Mock launch_operation() call
        mocker.patch.object(OperationService, 'launch_operation')

        operation_gid, status = self.launch_resource.post(
            self.test_project.gid, algorithm_module, algorithm_class)

    assert type(operation_gid) is str
    assert len(operation_gid) > 0
def load(self, gid=None, fname=None):
    # type: (typing.Union[uuid.UUID, str], str) -> ViewModel
    """
    Load a ViewModel object by reading the H5 file with the given GID, from
    the directory self.base_dir. Exactly one of gid / fname is expected.
    """
    # Resolve the concrete file path from whichever identifier was supplied.
    if fname is not None:
        file_path = os.path.join(self.base_dir, fname)
    elif gid is not None:
        file_path = self.find_file_by_gid(gid)
    else:
        raise ValueError(
            "Neither gid nor filename is provided to load!")

    result_class = H5File.determine_type(file_path)
    result = result_class()

    # Datatypes registered with a dedicated H5 file class are read through it;
    # everything else goes through the generic ViewModelH5 reader.
    h5file_class = self.registry.get_h5file_for_datatype(result.__class__)
    if h5file_class != H5File:
        with h5file_class(file_path) as source:
            self._load(source, result)
    else:
        with ViewModelH5(file_path, result) as source:
            self._load(source, result)
    return result
def gather_reference_files(self, gid, vm_ref_files, dt_ref_files, load_dts=None):
    """
    Recursively collect into vm_ref_files the H5 file paths of the view model
    with the given gid and of every view model it references; when load_dts is
    given, it is invoked on each open view model H5 to also fill dt_ref_files.

    :param gid: gid of the root view model
    :param vm_ref_files: output list of view model file paths (mutated)
    :param dt_ref_files: output list filled by load_dts, when provided
    :param load_dts: optional callable (vm_h5, dt_ref_files) gathering datatypes
    """
    vm_path = self.find_file_by_gid(gid)
    vm_ref_files.append(vm_path)
    view_model_class = H5File.determine_type(vm_path)
    view_model = view_model_class()
    with ViewModelH5(vm_path, view_model) as vm_h5:
        references = vm_h5.gather_references()
        # Fix: do not shadow the `gid` parameter with the loop variable
        for _, ref_gid in references:
            if not ref_gid:
                continue
            if isinstance(ref_gid, (list, tuple)):
                for list_gid in ref_gid:
                    self.gather_reference_files(list_gid, vm_ref_files, dt_ref_files, load_dts)
            else:
                self.gather_reference_files(ref_gid, vm_ref_files, dt_ref_files, load_dts)
        if load_dts:
            load_dts(vm_h5, dt_ref_files)
def post(self, project_gid, algorithm_module, algorithm_classname):
    """
    :generic method of launching Analyzers

    Receives a serialized view model H5 (plus any uploader data files) in the
    flask request, prepares an Operation for the identified algorithm and
    launches it asynchronously. Returns (operation gid, HTTP_STATUS_CREATED).

    :raises InvalidIdentifierException: unknown project gid or algorithm
    :raises ServiceException: any failure while preparing/launching the operation
    """
    # Save the uploaded view-model H5 into a temporary destination folder.
    model_file = self.extract_file_from_request()
    destination_folder = RestResource.get_destination_folder()
    h5_path = RestResource.save_temporary_file(model_file, destination_folder)
    try:
        project = self.project_service.find_project_lazy_by_gid(project_gid)
    except ProjectServiceException:
        raise InvalidIdentifierException(INVALID_PROJECT_GID_MESSAGE % project_gid)
    algorithm = FlowService.get_algorithm_by_module_and_class(algorithm_module, algorithm_classname)
    if algorithm is None:
        raise InvalidIdentifierException('No algorithm found for: %s.%s' % (algorithm_module, algorithm_classname))
    try:
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        view_model = adapter_instance.get_view_model_class()()
        # Open the uploaded H5 only to read its gid; contents are consumed later.
        view_model_h5 = ViewModelH5(h5_path, view_model)
        view_model_gid = view_model_h5.gid.load()
        # TODO: use logged user
        user_id = project.fk_admin
        operation = self.operation_service.prepare_operation(user_id, project.id, algorithm.id,
                                                            algorithm.algorithm_category, view_model_gid.hex,
                                                            None, {})
        storage_path = self.files_helper.get_project_folder(project, str(operation.id))
        if isinstance(adapter_instance, ABCUploader):
            # Uploaders carry extra data files: persist each one and record its
            # final (post-move) path inside the view model H5.
            for key, value in adapter_instance.get_form_class().get_upload_information().items():
                data_file = self.extract_file_from_request(file_name=key, file_extension=value)
                data_file_path = RestResource.save_temporary_file(data_file, destination_folder)
                file_name = os.path.basename(data_file_path)
                upload_field = getattr(view_model_h5, key)
                upload_field.store(os.path.join(storage_path, file_name))
                shutil.move(data_file_path, storage_path)
        # Move the view-model H5 into the operation folder and discard the
        # temporary folder (rmdir assumes it is empty after the moves).
        shutil.move(h5_path, storage_path)
        os.rmdir(destination_folder)
        view_model_h5.close()
        OperationService().launch_operation(operation.id, True)
    except Exception as excep:
        self.logger.error(excep, exc_info=True)
        raise ServiceException(str(excep))
    return operation.gid, HTTP_STATUS_CREATED
def launch_operation(self, project_gid, algorithm_class, view_model, temp_folder):
    """
    Serialize view_model to an H5 file in temp_folder and POST it (together
    with any uploader data files referenced by the view model) to the REST
    launch-operation endpoint.

    :return: the requests.Response of the POST call
    """
    h5_file_path = h5.path_for(temp_folder, ViewModelH5, view_model.gid)
    h5_file = ViewModelH5(h5_file_path, view_model)
    h5_file.store(view_model)
    h5_file.close()

    # Fix: the opened file objects were previously never closed; keep them
    # open for the duration of the POST, then close them all in finally.
    opened_files = []
    try:
        model_file_obj = open(h5_file_path, 'rb')
        opened_files.append(model_file_obj)
        files = {
            "model_file": (os.path.basename(h5_file_path), model_file_obj)
        }
        for key in algorithm_class().get_form_class().get_upload_information().keys():
            path = getattr(view_model, key)
            data_file_obj = open(path, 'rb')
            opened_files.append(data_file_obj)
            files[key] = (os.path.basename(path), data_file_obj)
        return requests.post(self.build_request_url(
            RestLink.LAUNCH_OPERATION.compute_url(
                True, {
                    LinkPlaceholder.PROJECT_GID.value: project_gid,
                    LinkPlaceholder.ALG_MODULE.value: algorithm_class.__module__,
                    LinkPlaceholder.ALG_CLASSNAME.value: algorithm_class.__name__
                })), files=files)
    finally:
        for file_obj in opened_files:
            file_obj.close()
def store_view_model(view_model, base_dir):
    # type: (ViewModel, str) -> str
    """
    Completely store any ViewModel object to the directory specified by
    base_dir. It works recursively because there are view models that are
    serialized in multiple files (eg. SimulatorAdapterModel)

    :return: path of the H5 file written for view_model itself
    """
    h5_path = path_for(base_dir, ViewModelH5, view_model.gid, type(view_model).__name__)
    with ViewModelH5(h5_path, view_model) as h5_file:
        h5_file.store(view_model)
        h5_file.type.store(get_full_class_name(type(view_model)))
        h5_file.create_date.store(date2string(datetime.now()))
        if hasattr(view_model, "generic_attributes"):
            h5_file.store_generic_attributes(view_model.generic_attributes)
        else:
            # For HasTraits not inheriting from ViewModel (e.g. Linear)
            h5_file.store_generic_attributes(GenericAttributes())

        references = h5_file.gather_references()
        for trait_attr, gid in references:
            if not gid:
                continue
            model_attr = getattr(view_model, trait_attr.field_name)
            if isinstance(gid, list):
                # Fix: iterate the attribute list directly — the enumerate
                # index was only used to re-index model_attr, sub_gid unused
                for sub_model in model_attr:
                    store_view_model(sub_model, base_dir)
            else:
                store_view_model(model_attr, base_dir)
    return h5_path
def _update_vm_generic_operation_tag(view_model, operation):
    """
    Persist operation.user_group as the operation_tag inside the view model's
    H5 file, located in the operation's project folder.
    """
    project = dao.get_project_by_id(operation.fk_launched_in)
    op_folder = FilesHelper().get_project_folder(project, str(operation.id))
    vm_h5_path = h5.path_for(op_folder, ViewModelH5, view_model.gid,
                             type(view_model).__name__)
    with ViewModelH5(vm_h5_path, view_model) as vm_h5:
        vm_h5.operation_tag.store(operation.user_group)
def test_dummy_importer_mv_to_h5(tmph5factory):
    """
    Store a DummyImporterViewModel into an H5 file, load it back into a fresh
    instance and check both attributes round-trip unchanged.
    """
    dummy_file_name = 'file_name.zip'
    dummy_scalar = 1.0
    divm = DummyImporterViewModel(uploaded=dummy_file_name, dummy_scalar=dummy_scalar)
    path = tmph5factory()

    h5_file = ViewModelH5(path, divm)
    h5_file.store(divm)
    h5_file.close()

    loaded_divm = DummyImporterViewModel()
    # A fresh instance has no uploaded attribute and a None scalar
    assert not hasattr(loaded_divm, 'uploaded')
    assert loaded_divm.dummy_scalar is None

    h5_file = ViewModelH5(path, loaded_divm)
    h5_file.load_into(loaded_divm)
    # Fix: close the reader handle as well — it was previously leaked
    h5_file.close()

    assert loaded_divm.uploaded == dummy_file_name
    assert loaded_divm.dummy_scalar == dummy_scalar
def _update_vm_generic_operation_tag(view_model, operation):
    """
    Persist operation.user_group as the operation_tag inside the view model's
    H5 file; a missing file is silently ignored.
    """
    project = dao.get_project_by_id(operation.fk_launched_in)
    vm_h5_path = h5.path_for(operation.id, ViewModelH5, view_model.gid,
                             project.name, type(view_model).__name__)
    # Nothing to update when the H5 file was never written
    if not os.path.exists(vm_h5_path):
        return
    with ViewModelH5(vm_h5_path, view_model) as vm_h5:
        vm_h5.operation_tag.store(operation.user_group)
def launch_operation(self, current_user_id, model_file, project_gid, algorithm_module,
                     algorithm_classname, fetch_file):
    """
    Prepare and asynchronously launch an operation for the given algorithm,
    using the uploaded view-model H5 (and any uploader data files fetched via
    fetch_file). Returns the gid of the launched operation.

    :param current_user_id: id of the user launching the operation
    :param model_file: uploaded file containing the serialized view model
    :param fetch_file: callable(request_file_key, file_extension) retrieving
                       additional uploaded data files from the request
    :raises InvalidIdentifierException: unknown project gid or algorithm
    :raises ServiceException: any failure while preparing/launching
    """
    # Save the uploaded view-model H5 into a fresh temporary folder.
    temp_folder = FilesHelper.create_temp_folder()
    model_h5_path = FilesHelper.save_temporary_file(model_file, temp_folder)
    try:
        project = self.project_service.find_project_lazy_by_gid(project_gid)
    except ProjectServiceException:
        raise InvalidIdentifierException()
    algorithm = AlgorithmService.get_algorithm_by_module_and_class(algorithm_module, algorithm_classname)
    if algorithm is None:
        raise InvalidIdentifierException('No algorithm found for: %s.%s' % (algorithm_module, algorithm_classname))
    try:
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        view_model = adapter_instance.get_view_model_class()()
        # Open the uploaded H5 only to read its gid; contents are consumed later.
        view_model_h5 = ViewModelH5(model_h5_path, view_model)
        view_model_gid = view_model_h5.gid.load()
        operation = self.operation_service.prepare_operation(current_user_id, project.id, algorithm.id,
                                                            algorithm.algorithm_category, view_model_gid.hex,
                                                            None, {})
        storage_path = self.files_helper.get_project_folder(project, str(operation.id))
        if isinstance(adapter_instance, ABCUploader):
            # Uploaders carry extra data files: persist each one and record its
            # final (post-move) path inside the view model H5.
            for key, value in adapter_instance.get_form_class().get_upload_information().items():
                data_file = fetch_file(request_file_key=key, file_extension=value)
                data_file_path = FilesHelper.save_temporary_file(data_file, temp_folder)
                file_name = os.path.basename(data_file_path)
                upload_field = getattr(view_model_h5, key)
                upload_field.store(os.path.join(storage_path, file_name))
                shutil.move(data_file_path, storage_path)
        # Move the view-model H5 into the operation folder and discard the
        # temporary folder (rmdir assumes it is empty after the moves).
        shutil.move(model_h5_path, storage_path)
        os.rmdir(temp_folder)
        view_model_h5.close()
        OperationService().launch_operation(operation.id, True)
        return operation.gid
    except Exception as excep:
        self.logger.error(excep, exc_info=True)
        raise ServiceException(str(excep))
def store(self, view_model, fname=None):
    # type: (ViewModel, str) -> str
    """
    Completely store any ViewModel object to the directory specified by
    self.base_dir. Works recursively for view models that are serialized
    in multiple files (eg. SimulatorAdapterModel)
    """
    # An explicit file name overrides the gid-derived default location.
    if fname is not None:
        h5_path = os.path.join(self.base_dir, fname)
    else:
        h5_path = self.path_for_has_traits(type(view_model), view_model.gid)
    with ViewModelH5(h5_path, view_model) as h5_file:
        self._store(h5_file, view_model)
    return h5_path
def load_view_model(self, adapter_instance, operation):
    """
    Reconstruct the view model for an operation: simulator operations go
    through SimulatorSerializer, everything else is read from the adapter's
    view model H5 file inside the operation folder.
    """
    storage_path = self.file_helper.get_project_folder(operation.project, str(operation.id))
    input_gid = json.loads(operation.parameters)['gid']
    # TODO: review location, storage_path, op params deserialization
    if isinstance(adapter_instance, SimulatorAdapter):
        view_model = SimulatorSerializer().deserialize_simulator(input_gid, storage_path)
    else:
        view_model_class = adapter_instance.get_view_model_class()
        view_model = view_model_class()
        h5_path = h5.path_for(storage_path, ViewModelH5, input_gid)
        # Fix: context manager — the H5 handle was previously never closed
        with ViewModelH5(h5_path, view_model) as h5_file:
            h5_file.load_into(view_model)
    return view_model
def store(self, ht, fname=None):
    # type: (HasTraits, str) -> str
    """
    Completely store any ViewModel object to the directory specified by
    self.base_dir. Works recursively for view models that are serialized
    in multiple files (eg. SimulatorAdapterModel)
    """
    # An explicit file name overrides the gid-derived default location.
    if fname is not None:
        h5_path = os.path.join(self.base_dir, fname)
    else:
        h5_path = self.path_for_has_traits(type(ht), ht.gid)

    # Datatypes registered with a dedicated H5 file class are written through
    # it; everything else goes through the generic ViewModelH5 writer.
    h5file_class = self.registry.get_h5file_for_datatype(ht.__class__)
    if h5file_class != H5File:
        with h5file_class(h5_path) as target:
            self._store(target, ht)
    else:
        with ViewModelH5(h5_path, ht) as target:
            self._store(target, ht)
    return h5_path
def launch_operation(self, current_user_id, model_file, project_gid, algorithm_module,
                     algorithm_classname, fetch_file):
    """
    Prepare and asynchronously launch an operation for the given algorithm,
    deserializing the uploaded view-model H5 and (for uploaders) recording the
    fetched data file paths in it. Returns the gid of the launched operation.

    :param current_user_id: id of the user launching the operation
    :param model_file: uploaded file containing the serialized view model
    :param fetch_file: callable(request_file_key, file_extension) retrieving
                       additional uploaded data files from the request
    :raises InvalidIdentifierException: unknown project gid or algorithm
    :raises ServiceException: any failure while preparing/launching
    """
    # Save the uploaded view-model H5 into a fresh temporary folder.
    temp_folder = create_temp_folder()
    model_h5_path = save_temporary_file(model_file, temp_folder)
    try:
        project = self.project_service.find_project_lazy_by_gid(project_gid)
    except ProjectServiceException:
        raise InvalidIdentifierException()
    try:
        algorithm = AlgorithmService.get_algorithm_by_module_and_class(algorithm_module, algorithm_classname)
        if algorithm is None:
            raise InvalidIdentifierException('No algorithm found for: %s.%s' % (algorithm_module,
                                                                                algorithm_classname))
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        view_model = h5.load_view_model_from_file(model_h5_path)
        if isinstance(adapter_instance, ABCUploader):
            # Uploaders carry extra data files: fetch each one and record its
            # temporary path as metadata inside the view-model H5.
            with ViewModelH5(model_h5_path, view_model) as view_model_h5:
                for key, value in adapter_instance.get_form_class().get_upload_information().items():
                    data_file = fetch_file(request_file_key=key, file_extension=value)
                    data_file_path = save_temporary_file(data_file, temp_folder)
                    view_model_h5.store_metadata_param(key, data_file_path)
            # Re-load so the view model reflects the metadata written above.
            view_model = h5.load_view_model_from_file(model_h5_path)
        operation = self.operation_service.prepare_operation(current_user_id, project, algorithm,
                                                            view_model=view_model)
        # The temporary H5 is no longer needed once the operation is prepared.
        if os.path.exists(model_h5_path):
            os.remove(model_h5_path)
        OperationService().launch_operation(operation.id, True)
        return operation.gid
    except Exception as excep:
        self.logger.error(excep, exc_info=True)
        raise ServiceException(str(excep))
def store_view_model(view_model, base_dir):
    # type: (ViewModel, str) -> None
    """
    Completely store any ViewModel object to the directory specified by
    base_dir. It works recursively because there are view models that are
    serialized in multiple files (eg. SimulatorAdapterModel)
    """
    h5_path = path_for(base_dir, ViewModelH5, view_model.gid, type(view_model).__name__)
    with ViewModelH5(h5_path, view_model) as h5_file:
        h5_file.store(view_model)
        h5_file.type.store(get_full_class_name(type(view_model)))

        references = h5_file.gather_references()
        for trait_attr, gid in references:
            if not gid:
                continue
            model_attr = getattr(view_model, trait_attr.field_name)
            if isinstance(gid, list):
                # Fix: iterate the attribute list directly — the enumerate
                # index was only used to re-index model_attr, sub_gid unused
                for sub_model in model_attr:
                    store_view_model(sub_model, base_dir)
            else:
                store_view_model(model_attr, base_dir)
def test_adapter_huge_memory_requirement(self, test_adapter_factory):
    """
    Test that an MemoryException is raised in case adapter cant launch due to lack of memory.
    """
    # Prepare adapter
    test_adapter_factory(adapter_class=TestAdapterHugeMemoryRequired)
    adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3",
                                         "TestAdapterHugeMemoryRequired")

    # Simulate receiving POST data
    form = TestAdapterHugeMemoryRequiredForm()
    adapter.submit_form(form)
    view_model = form.get_view_model()()
    view_model.test = 5

    # Prepare operation for launch
    operation = model_operation.Operation(self.test_user.id, self.test_project.id,
                                          adapter.stored_adapter.id,
                                          json.dumps({'gid': view_model.gid.hex}),
                                          json.dumps({}), status=model_operation.STATUS_STARTED)
    operation = dao.store_entity(operation)

    # Store ViewModel in H5
    parent_folder = FilesHelper().get_project_folder(self.test_project, str(operation.id))
    # Fix: path_for already builds the path rooted in parent_folder, so the
    # previous os.path.join(parent_folder, ...) wrapper was redundant
    view_model_path = h5.path_for(parent_folder, ViewModelH5, view_model.gid)
    with ViewModelH5(view_model_path, view_model) as view_model_h5:
        view_model_h5.store(view_model)

    # Launch operation
    with pytest.raises(NoMemoryAvailableException):
        OperationService().initiate_prelaunch(operation, adapter)
def _edit_data(self, datatype, new_data, from_group=False):
    # type: (DataType, dict, bool) -> None
    """
    Private method, used for editing a meta-data XML file and a DataType row
    for a given custom DataType entity with new dictionary of data from UI.

    :param datatype: the DataType index entity being edited
    :param new_data: dictionary of new metadata values coming from the UI
    :param from_group: when True, the edit applies to an OperationGroup name
                       instead of a single operation's user_group
    :raises StructureException: empty group name, or group name already taken
    """
    # 1. First update Operation fields:
    #    Update group field if possible
    new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
    empty_group_value = (new_group_name is None or new_group_name == "")
    if from_group:
        if empty_group_value:
            raise StructureException("Empty group is not allowed!")
        group = dao.get_generic_entity(OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
        # Only rename when a group exists and the name actually changed
        if group and len(group) > 0 and new_group_name != group[0].name:
            group = group[0]
            exists_group = dao.get_generic_entity(OperationGroup, new_group_name, 'name')
            if exists_group:
                raise StructureException("Group '" + new_group_name + "' already exists.")
            group.name = new_group_name
            dao.store_entity(group)
    else:
        # Single operation: store the new tag on the Operation row, then
        # mirror it into the operation's ViewModel H5 (when one exists).
        operation = dao.get_operation_by_id(datatype.fk_from_operation)
        operation.user_group = new_group_name
        dao.store_entity(operation)
        op_folder = self.structure_helper.get_project_folder(operation.project, str(operation.id))
        vm_gid = operation.view_model_gid
        view_model_file = h5.determine_filepath(vm_gid, op_folder)
        if view_model_file:
            view_model_class = H5File.determine_type(view_model_file)
            view_model = view_model_class()
            with ViewModelH5(view_model_file, view_model) as f:
                ga = f.load_generic_attributes()
                ga.operation_tag = new_group_name
                f.store_generic_attributes(ga, False)
        else:
            self.logger.warning("Could not find ViewModel H5 file for op: {}".format(operation))

    # 2. Update GenericAttributes in the associated H5 files:
    h5_path = h5.path_for_stored_index(datatype)
    with H5File.from_file(h5_path) as f:
        ga = f.load_generic_attributes()
        ga.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
        ga.state = new_data[DataTypeOverlayDetails.DATA_STATE]
        ga.operation_tag = new_group_name
        # User tags are optional; update only the ones present in new_data
        if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
            ga.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
        if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
            ga.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
        if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
            ga.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
        if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
            ga.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
        if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
            ga.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]
        f.store_generic_attributes(ga, False)

    # 3. Update MetaData in DT Index DB as well.
    datatype.fill_from_generic_attributes(ga)
    dao.store_entity(datatype)
def prepare_operations(self, user_id, project, algorithm, category, metadata,
                       visible=True, existing_dt_group=None, view_model=None, **kwargs):
    """
    Do all the necessary preparations for storing an operation. If it's the case of a
    range of values create an operation group and multiple operations for each possible
    instance from the range.

    :param metadata: Initial MetaData with potential Burst identification inside.
    :returns: tuple (list of stored Operation entities, OperationGroup or None)
    :raises LaunchException: when the requested range exceeds MAX_RANGE_NUMBER
    """
    # TODO: fix group operations
    operations = []

    # Expand range parameters into one argument set per operation instance.
    available_args, group = self._prepare_group(project.id, existing_dt_group, kwargs)
    if len(available_args) > TvbProfile.current.MAX_RANGE_NUMBER:
        raise LaunchException("Too big range specified. You should limit the"
                              " resulting operations to %d" % TvbProfile.current.MAX_RANGE_NUMBER)
    else:
        self.logger.debug("Launching a range with %d operations..." % len(available_args))
    group_id = None
    if group is not None:
        group_id = group.id
    metadata, user_group = self._prepare_metadata(metadata, category, group, kwargs)

    self.logger.debug("Saving Operation(userId=" + str(user_id) + ",projectId=" + str(project.id) + "," +
                      str(metadata) + ",algorithmId=" + str(algorithm.id) + ", ops_group= " + str(group_id) + ")")

    # Operations for display-category algorithms are hidden regardless of `visible`
    visible_operation = visible and category.display is False
    meta_str = json.dumps(metadata)
    for (one_set_of_args, range_vals) in available_args:
        range_values = json.dumps(range_vals) if range_vals else None
        operation = Operation(user_id, project.id, algorithm.id,
                              json.dumps({'gid': view_model.gid.hex}), meta_str,
                              op_group_id=group_id, user_group=user_group, range_values=range_values)
        operation.visible = visible_operation
        operations.append(operation)
    operations = dao.store_entities(operations)

    if group is not None:
        burst_id = None
        if DataTypeMetaData.KEY_BURST in metadata:
            burst_id = metadata[DataTypeMetaData.KEY_BURST]
        if existing_dt_group is None:
            # First launch of this group: create the DataTypeGroup container
            datatype_group = DataTypeGroup(group, operation_id=operations[0].id, fk_parent_burst=burst_id,
                                           state=metadata[DataTypeMetaData.KEY_STATE])
            dao.store_entity(datatype_group)
        else:
            # Reset count
            existing_dt_group.count_results = None
            dao.store_entity(existing_dt_group)

    # Serialize the view model into every operation's folder.
    for operation in operations:
        storage_path = FilesHelper().get_project_folder(project, str(operation.id))
        h5_path = h5.path_for(storage_path, ViewModelH5, view_model.gid)
        h5_file = ViewModelH5(h5_path, view_model)
        h5_file.store(view_model)
        h5_file.close()

    return operations, group