def test_fire_operation(self):
    """
    Test the preparation of an adapter and its launch mechanism.
    """
    adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.dummy_adapter1", "DummyAdapter1")
    test_user = TestFactory.create_user(username="******")
    test_project = TestFactory.create_project(admin=test_user, name="test_project_fire_sim")
    result = OperationService().fire_operation(adapter, test_user, test_project.id,
                                               view_model=adapter.get_view_model()())
    assert result.endswith("has finished."), "Operation failed."
def _launch_and_check_noise(self, params, expected_noise_shape):
    filtered_params = self.simulator_adapter.prepare_ui_inputs(params)
    self.simulator_adapter.configure(**filtered_params)
    if hasattr(self.simulator_adapter, 'algorithm'):
        self.assertEqual(expected_noise_shape, self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
    else:
        self.fail("Simulator adapter was not initialized properly")
    OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **params)
def test_simulation_with_stimulus(self, stimulus_factory):
    """
    Test a simulation with a stimulus.
    """
    params = copy(SIMULATOR_PARAMETERS)
    params["stimulus"] = stimulus_factory.gid
    filtered_params = self.simulator_adapter.prepare_ui_inputs(params)
    self.simulator_adapter.configure(**filtered_params)
    OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, **params)
def do_operation_launch(operation_id):
    """
    Event attached to the local queue, for executing an operation when resources become available.
    """
    log = get_logger('tvb.core.operation_async_launcher')
    burst_service = BurstService()

    try:
        log.debug("Loading operation with id=%s" % operation_id)
        current_operation = dao.get_operation_by_id(operation_id)
        stored_adapter = current_operation.algorithm
        log.debug("Importing Algorithm: " + str(stored_adapter.classname) +
                  " for Operation:" + str(current_operation.id))
        adapter_instance = ABCAdapter.build_adapter(stored_adapter)
        # Un-comment below for profiling an operation:
        # import cherrypy.lib.profiler as profiler
        # p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        # p.run(OperationService().initiate_prelaunch, current_operation, adapter_instance, {}, **PARAMS)
        OperationService().initiate_prelaunch(current_operation, adapter_instance)

        if current_operation.fk_operation_group:
            parent_burst = dao.get_generic_entity(BurstConfiguration, current_operation.fk_operation_group,
                                                  'fk_operation_group')[0]
            operations_in_group = dao.get_operations_in_group(current_operation.fk_operation_group)
            if parent_burst.fk_metric_operation_group:
                operations_in_group.extend(dao.get_operations_in_group(parent_burst.fk_metric_operation_group))
            burst_finished = True
            for operation in operations_in_group:
                if not has_finished(operation.status):
                    burst_finished = False
                    break
            if burst_finished and parent_burst is not None and parent_burst.status != BurstConfiguration.BURST_ERROR:
                burst_service.mark_burst_finished(parent_burst)
        else:
            parent_burst = burst_service.get_burst_for_operation_id(operation_id)
            if parent_burst is not None:
                burst_service.mark_burst_finished(parent_burst)

        log.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        log.error("Could not execute operation " + str(operation_id))
        log.exception(excep)
        parent_burst = burst_service.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            burst_service.mark_burst_finished(parent_burst, error_message=str(excep))
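# Hedged usage sketch (not part of the original source): do_operation_launch is meant to be
# triggered from a queue of operation ids. A minimal worker loop could look like the following;
# the queue wiring and the None sentinel are assumptions for illustration only.
def consume_operation_queue(operation_queue):
    while True:
        operation_id = operation_queue.get()   # blocks until an operation id is available
        if operation_id is None:               # hypothetical sentinel used to stop the worker
            break
        do_operation_launch(operation_id)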
def test_adapter_launch(self):
    """
    Test that the adapter launches and successfully generates a datatype measure entry.
    """
    meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"}
    algo_group = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)[1]
    self.operation = model.Operation(self.test_user.id, self.test_project.id, algo_group.id,
                                     json.dumps(''), meta=json.dumps(meta), status=model.STATUS_STARTED)
    self.operation = dao.store_entity(self.operation)
    storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
    dummy_input = numpy.arange(1, 10001).reshape(10, 10, 10, 10)
    dummy_time = numpy.arange(1, 11)

    # Get connectivity
    connectivities = FlowService().get_available_datatypes(self.test_project.id,
                                                           "tvb.datatypes.connectivity.Connectivity")[0]
    self.assertEqual(2, len(connectivities))
    connectivity_gid = connectivities[0][2]

    dummy_time_series = TimeSeriesRegion()
    dummy_time_series.storage_path = storage_path
    dummy_time_series.write_data_slice(dummy_input)
    dummy_time_series.write_time_slice(dummy_time)
    dummy_time_series.close_file()
    dummy_time_series.start_time = 0.0
    dummy_time_series.sample_period = 1.0
    dummy_time_series.connectivity = connectivity_gid

    adapter_instance = StoreAdapter([dummy_time_series])
    OperationService().initiate_prelaunch(self.operation, adapter_instance, {})

    dummy_time_series = dao.get_generic_entity(dummy_time_series.__class__, dummy_time_series.gid, 'gid')[0]
    ts_metric_adapter = TimeseriesMetricsAdapter()
    resulted_metric = ts_metric_adapter.launch(dummy_time_series)
    self.assertTrue(isinstance(resulted_metric, DatatypeMeasure), "Result should be a datatype measure.")
    self.assertTrue(len(resulted_metric.metrics) >= len(ts_metric_adapter.available_algorithms.keys()),
                    "At least one result should have been generated for every metric.")
    for metric_value in resulted_metric.metrics.values():
        self.assertTrue(isinstance(metric_value, (float, int)))
def create_connectivity(self, nodes=74):
    """
    Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
    """
    operation, algo_id, storage_path = self.__create_operation()
    connectivity = Connectivity(storage_path=storage_path)
    connectivity.weights = numpy.ones((nodes, nodes))
    connectivity.centres = numpy.ones((nodes, 3))
    adapter_instance = StoreAdapter([connectivity])
    OperationService().initiate_prelaunch(operation, adapter_instance, {})
    return algo_id, connectivity
def import_conn_zip(project_id, zip_path):
    project = dao.get_project_by_id(project_id)
    importer = ABCAdapter.build_adapter_from_class(ZIPConnectivityImporter)
    view_model = ZIPConnectivityImporterModel()
    view_model.uploaded = zip_path
    OperationService().fire_operation(importer, project.administrator, project_id, view_model=view_model)
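# Hedged usage sketch: importing a connectivity ZIP into an existing project through the
# helper above. The project id and the archive path are illustrative assumptions.
import_conn_zip(1, "/tmp/connectivity_76.zip")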
def test_happy_flow_launch(self):
    """
    Test that launching a simulation from the UI works.
    """
    OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
    sim_result = dao.get_generic_entity(TimeSeriesRegion, 'TimeSeriesRegion', 'type')[0]
    assert sim_result.read_data_shape() == (32, 1, self.CONNECTIVITY_NODES, 1)
def create_crosscoherence(self, time_series):
    """
    :returns: a stored entity of type CoherenceSpectrum
    """
    operation, _, storage_path = self.__create_operation()
    partial_coh = CoherenceSpectrum(array_data=numpy.random.random((10, 10, 10, 10)), use_storage=False)
    coherence = CoherenceSpectrum(source=time_series, storage_path=storage_path, frequency=0.1, nfft=256)
    coherence.write_data_slice(partial_coh)
    coherence.close_file()
    adapter_instance = StoreAdapter([coherence])
    OperationService().initiate_prelaunch(operation, adapter_instance, {})
    return coherence
def fire_operation(project_id, adapter_instance, view_model):
    TvbProfile.set_profile(TvbProfile.COMMAND_PROFILE)
    project = dao.get_project_by_id(project_id)
    # Launch an operation and have the results stored both in DB and on disk
    launched_operation = OperationService().fire_operation(adapter_instance, project.administrator,
                                                           project.id, view_model=view_model)[0]
    LOG.info("Operation launched....")
    return launched_operation
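# Hedged usage sketch: fire_operation expects an already-built adapter and a populated view
# model. The importer and view-model construction mirror the other snippets in this section;
# the project id and the path are assumptions for illustration.
importer = ABCAdapter.build_adapter_from_class(ZIPConnectivityImporter)
view_model = importer.get_view_model_class()()
view_model.uploaded = "/tmp/connectivity_76.zip"   # illustrative path
operation = fire_operation(1, importer, view_model)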
def async_launch_simulation_on_server(self, operation, zip_folder_path, storage_operation_path):
    try:
        for file_name in os.listdir(zip_folder_path):
            shutil.move(os.path.join(zip_folder_path, file_name), storage_operation_path)
        try:
            OperationService().launch_operation(operation.id, True)
            shutil.rmtree(zip_folder_path)
            return operation
        except Exception as excep:
            self.logger.error(excep)
    except Exception as excep:
        self.logger.error(excep)
def create_crosscorrelation(self, time_series):
    """
    :returns: `CrossCorrelation` stored entity.
    """
    operation, _, storage_path = self.__create_operation()
    partial_corr = CrossCorrelation(array_data=numpy.random.random((10, 10, 10, 10, 10)), use_storage=False)
    crossc = CrossCorrelation(source=time_series, storage_path=storage_path, time=range(10))
    crossc.write_data_slice(partial_corr)
    crossc.close_file()
    adapter_instance = StoreAdapter([crossc])
    OperationService().initiate_prelaunch(operation, adapter_instance, {})
    return crossc
def test_adapter_huge_memory_requirement(self):
    """
    Test that a NoMemoryAvailableException is raised when an adapter cannot launch due to lack of memory.
    """
    adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3",
                                         "TestAdapterHugeMemoryRequired")
    data = {"test": 5}
    operation = model.Operation(self.test_user.id, self.test_project.id, adapter.stored_adapter.id,
                                json.dumps(data), json.dumps({}), status=model.STATUS_STARTED)
    operation = dao.store_entity(operation)
    self.assertRaises(NoMemoryAvailableException, OperationService().initiate_prelaunch, operation, adapter, {})
def launch_operation(self, current_user_id, model_file, project_gid, algorithm_module,
                     algorithm_classname, fetch_file):
    temp_folder = FilesHelper.create_temp_folder()
    model_h5_path = FilesHelper.save_temporary_file(model_file, temp_folder)

    try:
        project = self.project_service.find_project_lazy_by_gid(project_gid)
    except ProjectServiceException:
        raise InvalidIdentifierException()

    algorithm = FlowService.get_algorithm_by_module_and_class(algorithm_module, algorithm_classname)
    if algorithm is None:
        raise InvalidIdentifierException('No algorithm found for: %s.%s' % (algorithm_module, algorithm_classname))

    try:
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        view_model = adapter_instance.get_view_model_class()()

        view_model_h5 = ViewModelH5(model_h5_path, view_model)
        view_model_gid = view_model_h5.gid.load()

        operation = self.operation_service.prepare_operation(current_user_id, project.id, algorithm.id,
                                                             algorithm.algorithm_category, view_model_gid.hex,
                                                             None, {})
        storage_path = self.files_helper.get_project_folder(project, str(operation.id))

        if isinstance(adapter_instance, ABCUploader):
            for key, value in adapter_instance.get_form_class().get_upload_information().items():
                data_file = fetch_file(request_file_key=key, file_extension=value)
                data_file_path = FilesHelper.save_temporary_file(data_file, temp_folder)
                file_name = os.path.basename(data_file_path)
                upload_field = getattr(view_model_h5, key)
                upload_field.store(os.path.join(storage_path, file_name))
                shutil.move(data_file_path, storage_path)

        shutil.move(model_h5_path, storage_path)
        os.rmdir(temp_folder)
        view_model_h5.close()

        OperationService().launch_operation(operation.id, True)
        return operation.gid
    except Exception as excep:
        self.logger.error(excep, exc_info=True)
        raise ServiceException(str(excep))
def import_conn_zip(project_id, zip_path):
    TvbProfile.set_profile(TvbProfile.COMMAND_PROFILE)
    project = dao.get_project_by_id(project_id)
    importer = ABCAdapter.build_adapter_from_class(ZIPConnectivityImporter)
    view_model = ZIPConnectivityImporterModel()
    view_model.uploaded = zip_path
    return OperationService().fire_operation(importer, project.administrator, project_id,
                                             view_model=view_model)[0]
def setUp(self):
    """
    Set up the testing environment: save the configuration file, create a test user and a
    test project, and instantiate the burst, operation, flow and workflow services.
    """
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user)
    self.workflow_service = WorkflowService()
    self.burst_service = BurstService()
    self.operation_service = OperationService()
    self.flow_service = FlowService()
def cancel_all_operations(self):
    """
    Cancel all operations after each test, to make sure no running operations are left
    behind that could cause subsequently started tests to fail.
    """
    LOGGER.info("Stopping all operations.")
    op_service = OperationService()
    operations = self.get_all_entities(Operation)
    for operation in operations:
        try:
            op_service.stop_operation(operation.id)
        except Exception:
            # Ignore operations potentially left in an invalid state by other unit-tests
            pass
def test_remove_array_wrapper(self):
    """
    Tests the happy flow for the deletion of an array wrapper.
    """
    count_array = self.count_all_entities(MappedArray)
    self.assertEqual(1, count_array)
    data = {'param_1': 'some value'}
    OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
    array_wrappers = self.get_all_entities(MappedArray)
    self.assertEqual(2, len(array_wrappers))
    array_gid = array_wrappers[0].gid
    self.project_service.remove_datatype(self.test_project.id, array_gid)
    res = dao.get_datatype_by_gid(array_gid)
    self.assertEqual(None, res, "The array wrapper was not deleted.")
def launch_importer(importer_class, view_model, user, project, same_process=True):
    # type: (type, ViewModel, User, Project, bool) -> None
    """
    same_process = False follows the normal flow, with uploaders running synchronously but in a
    different process. That branch is not compatible with usage in subclasses of
    TransactionalTestCase, because the upload results will not be available to the running
    unit-test.
    same_process = True is for usage in subclasses of TransactionalTestCase, e.g. for data
    preparation. It does not exercise the "real" upload flow, but comes very close to it.
    """
    importer = ABCAdapter.build_adapter_from_class(importer_class)
    if same_process:
        TestFactory.launch_synchronously(user, project, importer, view_model)
    else:
        OperationService().fire_operation(importer, user, project.id, view_model=view_model)
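# Hedged usage sketch for a TransactionalTestCase subclass: run the ZIP connectivity importer
# in the same process so its results are visible to the test. The zip path is an illustrative
# assumption; self.test_user and self.test_project are assumed to come from TestFactory
# helpers, as in the setUp snippet above.
view_model = ZIPConnectivityImporterModel()
view_model.uploaded = "/tmp/connectivity_76.zip"
launch_importer(ZIPConnectivityImporter, view_model, self.test_user, self.test_project,
                same_process=True)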
def __init__(self, overwrites=None, settings_file=None):
    """
    Parameters can be overwritten either from a settings file or from a dictionary.
    """
    if overwrites is not None:
        self.overwrites.update(overwrites)
    if settings_file is not None:
        # Read the settings file passed as a parameter (the original read sys.argv[1],
        # leaving the settings_file argument unused).
        settings = open(settings_file).read()
        for line in settings.split('\n'):
            key, value = line.split('=')
            self.overwrites[key.strip()] = value.strip()
    if KEY_PROJECT not in self.overwrites:
        raise Exception("Settings file should contain the id of the project: %s=1" % KEY_PROJECT)
    self.project = dao.get_project_by_id(self.overwrites[KEY_PROJECT])
    self.flow_service = FlowService()
    self.operation_service = OperationService()
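# Hedged sketch of the settings-file format this constructor expects: one key=value pair per
# line, with the project id mandatory. The literal value of KEY_PROJECT and the second key
# shown are assumptions; the enclosing class name is not visible in this snippet.
#
#   settings.cfg:
#       project=1
#       nodes=74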
def _create_connectivity(self, nodes_number):
    """
    Create a connectivity entity and return its GID
    """
    storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
    connectivity = Connectivity(storage_path=storage_path)
    connectivity.weights = numpy.ones((nodes_number, nodes_number))
    connectivity.centres = numpy.ones((nodes_number, 3))
    adapter_instance = StoreAdapter([connectivity])
    OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
    return dao.get_datatype_by_id(connectivity.id).gid
def __init__(self):
    BaseController.__init__(self)
    self.operation_service = OperationService()
    self.logger = get_logger(__name__)
    editable_entities = [dict(link='/spatial/stimulus/region/step_1_submit/1/1', title='Region Stimulus',
                              subsection='regionstim', description='Create a new Stimulus on Region level'),
                         dict(link='/spatial/stimulus/surface/step_1_submit/1/1', title='Surface Stimulus',
                              subsection='surfacestim', description='Create a new Stimulus on Surface level')]
    self.submenu_list = editable_entities
def launch_synchronously(test_user, test_project, adapter_instance, view_model, algo_category=None):
    # Avoid the scheduled execution, as that is asynchronous; launch the operation immediately.
    service = OperationService()
    algorithm = adapter_instance.stored_adapter
    if algo_category is None:
        algo_category = dao.get_category_by_id(algorithm.fk_category)
    operation = service.prepare_operations(test_user.id, test_project, algorithm, algo_category,
                                           True, view_model=view_model)[0][0]
    service.initiate_prelaunch(operation, adapter_instance)
    operation = dao.get_operation_by_id(operation.id)
    # Check that the operation status after execution is success.
    assert STATUS_FINISHED == operation.status
    # Make sure at least one result exists for each BCT algorithm
    return dao.get_generic_entity(DataType, operation.id, 'fk_from_operation')
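# Hedged usage sketch: launch an uploader synchronously inside a unit-test and collect the
# resulting datatypes. The importer class and view model mirror the other snippets here; the
# zip path is an illustrative assumption, and test_user/test_project are assumed to come from
# TestFactory helpers as in the setUp snippet above.
importer = ABCAdapter.build_adapter_from_class(ZIPConnectivityImporter)
view_model = ZIPConnectivityImporterModel()
view_model.uploaded = "/tmp/connectivity_76.zip"
results = TestFactory.launch_synchronously(test_user, test_project, importer, view_model)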
def import_conn_h5(project_id, h5_path):
    project = dao.get_project_by_id(project_id)
    TvbProfile.set_profile(TvbProfile.COMMAND_PROFILE)

    now = datetime.now()
    date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day, now.hour,
                                         now.minute, now.second, now.microsecond)
    uq_name = "%s-Connectivity" % date_str
    new_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, uq_name)

    StorageInterface.copy_file(h5_path, new_path)
    importer = ABCAdapter.build_adapter_from_class(TVBImporter)
    view_model = importer.get_view_model_class()()
    view_model.data_file = new_path
    return OperationService().fire_operation(importer, project.administrator, project_id,
                                             view_model=view_model)
def test_adapter_huge_memory_requirement(self):
    """
    Test that a NoMemoryAvailableException is raised when an adapter cannot launch due to lack of memory.
    """
    module = "tvb.tests.framework.adapters.testadapter3"
    class_name = "TestAdapterHugeMemoryRequired"
    algo_group = dao.find_group(module, class_name)
    adapter = FlowService().build_adapter_instance(algo_group)
    data = {"test": 5}
    operation = model.Operation(self.test_user.id, self.test_project.id, algo_group.id,
                                json.dumps(data), json.dumps({}), status=model.STATUS_STARTED,
                                method_name=ABCAdapter.LAUNCH_METHOD)
    operation = dao.store_entity(operation)
    self.assertRaises(NoMemoryAvailableException, OperationService().initiate_prelaunch, operation, adapter, {})
def create_datatype_measure(self, analyzed_entity, operation=None, storage_path=None):
    """
    :return: persisted DatatypeMeasure
    """
    if operation is None:
        operation, _, storage_path = self.__create_operation()
    measure = DatatypeMeasure(storage_path=storage_path, metrics=self.DATATYPE_MEASURE_METRIC)
    measure.analyzed_datatype = analyzed_entity
    adapter_instance = StoreAdapter([measure])
    OperationService().initiate_prelaunch(operation, adapter_instance, {})
    return measure
def create_stimulus(self, connectivity):
    """
    :param connectivity: Connectivity to create stimuli for its regions
    :return: persisted region Stimuli instance
    """
    operation, _, storage_path = self.__create_operation()
    stimuli_region = StimuliRegion(storage_path=storage_path)
    stimuli_region.connectivity = connectivity
    stimuli_region.weight = numpy.random.random((connectivity.number_of_regions, 1)).tolist()
    stimuli_region.temporal = PulseTrain()
    adapter_instance = StoreAdapter([stimuli_region])
    OperationService().initiate_prelaunch(operation, adapter_instance, {})
    return stimuli_region
def post(self, project_gid, algorithm_module, algorithm_classname):
    """
    Generic method for launching Analyzers.
    """
    model_file = self.extract_file_from_request(request_file_key=RequestFileKey.LAUNCH_ANALYZERS_MODEL_FILE.value)
    destination_folder = RestResource.get_destination_folder()
    h5_path = RestResource.save_temporary_file(model_file, destination_folder)

    try:
        project = self.project_service.find_project_lazy_by_gid(project_gid)
    except ProjectServiceException:
        raise InvalidIdentifierException(INVALID_PROJECT_GID_MESSAGE % project_gid)

    algorithm = FlowService.get_algorithm_by_module_and_class(algorithm_module, algorithm_classname)
    if algorithm is None:
        raise InvalidIdentifierException('No algorithm found for: %s.%s' % (algorithm_module, algorithm_classname))

    try:
        adapter_instance = ABCAdapter.build_adapter(algorithm)
        view_model = adapter_instance.get_view_model_class()()

        view_model_h5 = ViewModelH5(h5_path, view_model)
        view_model_gid = view_model_h5.gid.load()

        current_user = get_current_user()
        operation = self.operation_service.prepare_operation(current_user.id, project.id, algorithm.id,
                                                             algorithm.algorithm_category, view_model_gid.hex,
                                                             None, {})
        storage_path = self.files_helper.get_project_folder(project, str(operation.id))

        if isinstance(adapter_instance, ABCUploader):
            for key, value in adapter_instance.get_form_class().get_upload_information().items():
                data_file = self.extract_file_from_request(request_file_key=key, file_extension=value)
                data_file_path = RestResource.save_temporary_file(data_file, destination_folder)
                file_name = os.path.basename(data_file_path)
                upload_field = getattr(view_model_h5, key)
                upload_field.store(os.path.join(storage_path, file_name))
                shutil.move(data_file_path, storage_path)

        shutil.move(h5_path, storage_path)
        os.rmdir(destination_folder)
        view_model_h5.close()

        OperationService().launch_operation(operation.id, True)
    except Exception as excep:
        self.logger.error(excep, exc_info=True)
        raise ServiceException(str(excep))

    return operation.gid, HTTP_STATUS_CREATED
def test_happy_flow_launch(self, datatype_factory, connectivity_factory):
    """
    Test that launching a simulation from the UI works.
    """
    self.test_user = datatype_factory['user']
    self.test_project = datatype_factory['project']
    self.connectivity = connectivity_factory(self.CONNECTIVITY_NODES)[1]
    self.operation = TestFactory.create_operation(datatype_factory['algorithm'], self.test_user,
                                                  self.test_project, STATUS_STARTED,
                                                  json.dumps(SIMULATOR_PARAMETERS))
    SIMULATOR_PARAMETERS['connectivity'] = self.connectivity.gid

    OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, **SIMULATOR_PARAMETERS)
    sim_result = dao.get_generic_entity(TimeSeriesRegion, 'TimeSeriesRegion', 'type')[0]
    assert sim_result.read_data_shape() == (32, 1, self.CONNECTIVITY_NODES, 1)
def _store_datatype(self, data_type, operation_id=None):
    """
    Launch the adapter to create and store a persistent DataType.
    """
    operation_id = operation_id or self.operation.id
    data_type.type = data_type.__class__.__name__
    data_type.module = data_type.__class__.__module__
    data_type.subject = self.USER_FULL_NAME
    data_type.state = self.DATATYPE_STATE
    data_type.set_operation_id(operation_id)
    adapter_instance = StoreAdapter([data_type])
    operation = dao.get_operation_by_id(operation_id)
    OperationService().initiate_prelaunch(operation, adapter_instance, {})
    return data_type