Example #1
    def create_group(test_user=None, test_project=None, subject="John Doe"):
        """
        Create a group of 2 operations, each with at least one resultant DataType.
        """
        if test_user is None:
            test_user = TestFactory.create_user()
        if test_project is None:
            test_project = TestFactory.create_project(test_user)

        ### Retrieve Adapter instance
        algo_group = dao.find_group('tvb_test.adapters.testadapter3',
                                    'TestAdapter3')
        algo_category = dao.get_category_by_id(algo_group.fk_category)
        algo = dao.get_algorithm_by_group(algo_group.id)

        adapter_inst = TestFactory.create_adapter(algo_group=algo_group,
                                                  test_project=test_project)
        adapter_inst.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: subject,
            DataTypeMetaData.KEY_STATE: "INTERMEDIATE"
        }
        args = {'first_range': 'param_5', 'param_5': [1, 2]}

        ### Prepare Operations group. Execute them synchronously
        service = OperationService()
        operations = service.prepare_operations(test_user.id, test_project.id,
                                                algo, algo_category, {},
                                                **args)[0]
        service.launch_operation(operations[0].id, False, adapter_inst)
        service.launch_operation(operations[1].id, False, adapter_inst)

        resulted_dts = dao.get_datatype_in_group(
            operations[0].fk_operation_group)
        return resulted_dts, operations[0].fk_operation_group
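A minimal usage sketch for the factory above (hypothetical test body; only `create_group` and its return values are taken from this example):

    # Launch the group and check one resultant DataType per operation.
    resulted_dts, group_id = create_group(subject="Jane Roe")
    assert len(resulted_dts) >= 2, "Expected a DataType from each of the 2 operations"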
Example #2
 def test_noise_2d_happy_flow(self):
     """
     Test a simulation with noise.
     """
     SIMULATOR_PARAMETERS['integrator'] = u'HeunStochastic'
     noise_2d_config = [[1 for _ in xrange(self.CONNECTIVITY_NODES)] for _ in xrange(2)]
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_dt'] = u'0.01220703125'
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise'] = u'Additive'
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = str(noise_2d_config)
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_ntau'] = u'0.0'
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream'] = u'RandomStream'
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream_parameters_option_RandomStream_init_seed'] = u'42'
     filtered_params = self.simulator_adapter.prepare_ui_inputs(SIMULATOR_PARAMETERS)
     self.simulator_adapter.configure(**filtered_params)
     if hasattr(self.simulator_adapter, 'algorithm'):
         self.assertEqual((2, 74, 1), self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
     else:
         self.fail("Simulator adapter was not initialized properly")
     OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
     sim_result = dao.get_generic_entity(TimeSeriesRegion, 'TimeSeriesRegion', 'type')[0]
     self.assertEqual(sim_result.read_data_shape(), (32, 1, self.CONNECTIVITY_NODES, 1))
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = '[1]'
     filtered_params = self.simulator_adapter.prepare_ui_inputs(SIMULATOR_PARAMETERS)
     self.simulator_adapter.configure(**filtered_params)
     if hasattr(self.simulator_adapter, 'algorithm'):
         self.assertEqual((1,), self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
     else:
         self.fail("Simulator adapter was not initialized properly")
     OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
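The `(2, 74, 1)` assertion reflects the noise-dispersion layout of (state variables, nodes, modes); presumably the adapter appends the trailing mode axis to the 2 x CONNECTIVITY_NODES configuration. A standalone numpy sketch of that reading (nothing beyond numpy is assumed):

    import numpy
    CONNECTIVITY_NODES = 74
    nsig = numpy.array([[1] * CONNECTIVITY_NODES] * 2)   # shape (2, 74)
    nsig = nsig[:, :, numpy.newaxis]                     # append a mode axis
    assert nsig.shape == (2, CONNECTIVITY_NODES, 1)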
Example #3
 def setUp(self):
     """
     Sets up the environment for testing;
     creates a `FlowController`, a `BurstController` and an `OperationService`
     """
     BaseControllersTest.init(self)
     self.flow_c = FlowController()
     self.burst_c = BurstController()
     self.operation_service = OperationService()
Example #4
 def setUp(self):
     #        self.clean_database()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.old_config_file = cfg.CURRENT_DIR
     cfg.CURRENT_DIR = os.path.dirname(tvb_test.__file__)
     self.workflow_service = WorkflowService()
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.flow_service = FlowService()
Example #5
 def cancel_all_operations(self):
     """
     To make sure no running operations are left behind that could cause a
     later test to fail, cancel all operations after each test.
     """
     LOGGER.info("Stopping all operations.")
     op_service = OperationService()
     operations = self.get_all_entities(model.Operation)
     for operation in operations:
         op_service.stop_operation(operation.id)
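One way such a cleanup helper is typically wired into a test class (a hypothetical tearDown; only `cancel_all_operations` comes from this example):

    def tearDown(self):
        # Stop anything still running so later tests start from a clean slate.
        self.cancel_all_operations()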
Example #6
 def setUp(self):
     """
     Reset the database before each test.
     """
     self.clean_database()
     initialize_storage()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.operation_service = OperationService()
     self.backup_hdd_size = TVBSettings.MAX_DISK_SPACE
Example #7
    def test_import_export_existing(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        # create an array mapped in DB
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation,
                                              self.adapter_instance, {},
                                              **data)
        inserted = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(inserted), 2, "Problems when inserting data")

        # create a value wrapper
        self._create_value_wrapper()
        result = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(
            len(result), 2,
            "Expected two operations before export, not %s!" % len(result))
        self.zip_path = ExportManager().export_project(self.test_project)
        self.assertTrue(self.zip_path is not None, "Exported file is none")

        try:
            self.import_service.import_project_structure(
                self.zip_path, self.test_user.id)
            self.fail("Invalid import as the project already exists!")
        except ProjectImportException:
            #OK, do nothing. The project already exists.
            pass
Example #8
 def _burst_create_connectivity(self):
     """
     Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
     TODO: This is duplicate code from burstservice_test. Should go into the 'generic' DataType factory
     once that is done.
     """
     meta = {
         DataTypeMetaData.KEY_SUBJECT: "John Doe",
         DataTypeMetaData.KEY_STATE: "RAW"
     }
     algorithm, algo_group = FlowService().get_algorithm_by_module_and_class(
         SIMULATOR_MODULE, SIMULATOR_CLASS)
     self.operation = model.Operation(self.test_user.id,
                                      self.test_project.id,
                                      algo_group.id,
                                      json.dumps(''),
                                      meta=json.dumps(meta),
                                      status="STARTED",
                                      method_name=ABCAdapter.LAUNCH_METHOD)
     self.operation = dao.store_entity(self.operation)
     storage_path = FilesHelper().get_project_folder(
         self.test_project, str(self.operation.id))
     connectivity = Connectivity(storage_path=storage_path)
     connectivity.weights = numpy.ones((74, 74))
     connectivity.centres = numpy.ones((74, 3))
     adapter_instance = StoreAdapter([connectivity])
     OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                           {})
     return algorithm.id, connectivity
Example #9
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation once resources become available.
    """
    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        current_operation = dao.get_operation_by_id(operation_id)
        algorithm = current_operation.algorithm
        algorithm_group = dao.get_algo_group_by_id(algorithm.fk_algo_group)
        LOGGER.debug("Importing Algorithm: " + str(algorithm_group.classname) +
                     " for Operation: " + str(current_operation.id))
        PARAMS = parse_json_parameters(current_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(algorithm_group)

        ## Un-comment below for profiling an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, current_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(current_operation,
                                              adapter_instance, {}, **PARAMS)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception, excep:
        LOGGER.error("Could not execute operation " + str(sys.argv[1]))
        LOGGER.exception(excep)
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst,
                                                  error=True,
                                                  error_message=str(excep))
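A hypothetical consumer loop showing how this handler might be attached to a local queue; `operation_queue` is an assumed Queue instance, not part of the snippet:

    from Queue import Queue           # Python 2 standard-library queue
    operation_queue = Queue()

    while True:
        operation_id = operation_queue.get()   # block until work arrives
        do_operation_launch(operation_id)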
Example #10
    def _async_launch_and_prepare(self, burst_config, simulator_index,
                                  simulator_id, user_id):
        """
        Prepare operations asynchronously.
        """
        try:
            operation_ids = self._prepare_operations(burst_config,
                                                     simulator_index,
                                                     simulator_id, user_id)
            self.logger.debug("Starting a total of %s workflows" %
                              (len(operation_ids, )))
            wf_errs = 0
            for operation_id in operation_ids:
                try:
                    OperationService().launch_operation(operation_id, True)
                except Exception, excep:
                    self.logger.error(excep)
                    wf_errs += 1
                    self.workflow_service.mark_burst_finished(
                        burst_config, error=True, error_message=str(excep))

            self.logger.debug("Finished launching workflows. " +
                              str(len(operation_ids) - wf_errs) +
                              " were launched successfully, " + str(wf_errs) +
                              " had error on pre-launch steps")
Example #11
 def test_happy_flow_launch(self):
     """
     Test that launching a simulation from UI works.
     """
     OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
     sim_result = dao.get_generic_entity(TimeSeriesRegion, 'TimeSeriesRegion', 'type')[0]
     self.assertEqual(sim_result.read_data_shape(), (32, 1, self.CONNECTIVITY_NODES, 1))
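For reference, the asserted 4-D shape follows the (time points, state variables, nodes, modes) TimeSeries layout; this reading of the assertion is an assumption, not something documented in the snippet:

    n_time, n_svar, n_nodes, n_modes = (32, 1, 74, 1)   # CONNECTIVITY_NODES assumed to be 74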
Example #12
    def fire_operation(self, adapter_instance, current_user, project_id,  
                       method_name=ABCAdapter.LAUNCH_METHOD, visible=True, **data):
        """
        Launch an operation, specified by AdapterInstance, for CurrentUser, 
        Current Project and a given set of UI Input Data.
        """
        operation_name = str(adapter_instance.__class__.__name__) + "." + method_name
        try:
#            if OperationService.ATT_UID in data:
#                existent = dao.get_last_data_with_uid(data[OperationService.ATT_UID])
#                if existent is not None:
#                    self.create_link(existent, project_id)
#                    return "Created required links."
            self.logger.info("Starting operation " + operation_name)
            project = dao.get_project_by_id(project_id)
            tmp_folder = self.file_helper.get_project_folder(project, self.file_helper.TEMP_FOLDER)
            
            result = OperationService().initiate_operation(current_user, project.id, adapter_instance, 
                                                           tmp_folder, method_name, visible, **data)
            self.logger.info("Finished operation:" + operation_name)
            return result

        except TVBException, excep:
            self.logger.error("Could not launch operation " + operation_name + " with the given set of input data!")
            self.logger.exception(excep)
            raise OperationException(excep.message, excep)
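A hypothetical call site for `fire_operation` (the service, user and project objects are assumed to come from fixtures like those in the setUp examples above):

    result = flow_service.fire_operation(adapter_instance,
                                         test_user,
                                         test_project.id,
                                         **{'param_1': 'some value'})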
Example #13
 def setUp(self):
     """
     Sets up the testing environment;
     saves config file;
     creates a test user, a test project;
     creates burst, operation, flow and workflow services
     """
     #        self.clean_database()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.old_config_file = cfg.CURRENT_DIR
     cfg.CURRENT_DIR = os.path.dirname(tvb_test.__file__)
     self.workflow_service = WorkflowService()
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.flow_service = FlowService()
Example #14
 def stop_operation(self, operation_id, is_group):
     """
     Stop the operation given by operation_id. If is_group is true, stop all
     the operations from that group.
     """
     operation_service = OperationService()
     result = False
     if int(is_group) == 0:
         result = operation_service.stop_operation(operation_id)
     else:
         operation_id = operation_id.split(',')[0]
         operation = self.flow_service.load_operation(operation_id)
         operations_in_group = ProjectService.get_operations_in_group(
             operation.operation_group)
         for operation in operations_in_group:
             tmp_res = operation_service.stop_operation(operation.id)
             result = result or tmp_res
     return result
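Illustrative calls matching the two branches above (the controller instance and ids are assumed). For a group, passing the first GID of the comma-separated list would suffice, since only `split(',')[0]` is used:

    controller.stop_operation(operation_id='42', is_group=0)        # single operation
    controller.stop_operation(operation_id='42,43,44', is_group=1)  # whole group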
Example #15
 def create_connectivity_measure(self, connectivity):
     """
     :return: persisted entity ConnectivityMeasure
     """
     operation, _, storage_path = self.__create_operation()
     conn_measure = ConnectivityMeasure(storage_path=storage_path)
     conn_measure.connectivity = connectivity
     adapter_instance = StoreAdapter([conn_measure])
     OperationService().initiate_prelaunch(operation, adapter_instance, {})
     return conn_measure
Example #16
 def create_covariance(self, time_series):
     """
     :return: a stored DataType Covariance.
     """
     operation, _, storage_path = self.__create_operation()
     covariance = Covariance(storage_path=storage_path, source=time_series)
     covariance.write_data_slice(numpy.random.random((10, 10, 10)))
     adapter_instance = StoreAdapter([covariance])
     OperationService().initiate_prelaunch(operation, adapter_instance, {})
     return covariance
Example #17
 def _store_entity(self, entity, type_, module):
     """Launch adapter to store a create a persistent DataType."""
     entity.type = type_
     entity.module = module
     entity.subject = "John Doe"
     entity.state = "RAW_STATE"
     entity.set_operation_id(self.operation.id)
     adapter_instance = StoreAdapter([entity])
     OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                           {})
Example #18
 def create_connectivity(self):
     """
     Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
     """
     operation, algo_id, storage_path = self.__create_operation()
     connectivity = Connectivity(storage_path=storage_path)
     connectivity.weights = numpy.ones((74, 74))
     connectivity.centres = numpy.ones((74, 3))
     adapter_instance = StoreAdapter([connectivity])
     OperationService().initiate_prelaunch(operation, adapter_instance, {})
     return algo_id, connectivity
Example #19
 def stop_operation(self, operation_id, is_group, remove_after_stop=False):
     """
     Stop the operation given by operation_id. If is_group is true, stop all
     the operations from that group.
     """
     operation_service = OperationService()
     result = False
     if int(is_group) == 0:
         result = operation_service.stop_operation(operation_id)
         if remove_after_stop:
             ProjectService().remove_operation(operation_id)
     else:
         op_group = ProjectService.get_operation_group_by_id(operation_id)
         operations_in_group = ProjectService.get_operations_in_group(
             op_group)
         for operation in operations_in_group:
             tmp_res = operation_service.stop_operation(operation.id)
             if remove_after_stop:
                 ProjectService().remove_operation(operation.id)
             result = result or tmp_res
     return result
Example #20
 def _create_connectivity(self, nodes_number):
     """
     Create a connectivity entity and return its GID
     """
     storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
     connectivity = Connectivity(storage_path=storage_path)
     connectivity.weights = numpy.ones((nodes_number, nodes_number))
     connectivity.centres = numpy.ones((nodes_number, 3))
     adapter_instance = StoreAdapter([connectivity])
     OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
     
     return dao.get_datatype_by_id(connectivity.id).gid
Example #21
    def test_adapter_launch(self):
        """
        Test that the adapter launches and successfully generates a datatype measure entry.
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW"
        }
        algo_group = FlowService().get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)[1]
        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         algo_group.id,
                                         json.dumps(''),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED,
                                         method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))
        dummy_input = numpy.arange(1, 10001).reshape(10, 10, 10, 10)
        dummy_time = numpy.arange(1, 11)

        # Get connectivity
        connectivities = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.connectivity.Connectivity")
        self.assertEqual(len(connectivities), 1)
        connectivity_gid = connectivities[0][2]

        dummy_time_series = TimeSeriesRegion()
        dummy_time_series.storage_path = storage_path
        dummy_time_series.write_data_slice(dummy_input)
        dummy_time_series.write_time_slice(dummy_time)
        dummy_time_series.close_file()
        dummy_time_series.start_time = 0.0
        dummy_time_series.sample_period = 1.0
        dummy_time_series.connectivity = connectivity_gid

        adapter_instance = StoreAdapter([dummy_time_series])
        OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                              {})

        dummy_time_series = dao.get_generic_entity(dummy_time_series.__class__,
                                                   dummy_time_series.gid,
                                                   'gid')[0]
        ts_metric_adapter = TimeseriesMetricsAdapter()
        resulted_metric = ts_metric_adapter.launch(dummy_time_series)
        self.assertTrue(isinstance(resulted_metric, DatatypeMeasure),
                        "Result should be a datatype measure.")
        self.assertTrue(
            len(resulted_metric.metrics) == len(
                ts_metric_adapter.available_algorithms.keys()),
            "A result should have been generated for every metric.")
Example #22
    def _store_datatype(self, data_type, operation_id=None):
        """
        Launch adapter to create and store a persistent DataType.
        """
        operation_id = operation_id or self.operation.id
        data_type.type = data_type.__class__.__name__
        data_type.module = data_type.__class__.__module__
        data_type.subject = self.USER_FULL_NAME
        data_type.state = self.DATATYPE_STATE
        data_type.set_operation_id(operation_id)

        adapter_instance = StoreAdapter([data_type])
        operation = dao.get_operation_by_id(operation_id)
        OperationService().initiate_prelaunch(operation, adapter_instance, {})

        return data_type
Example #23
 def create_crosscorrelation(self, time_series):
     """
     :return: CrossCorrelation stored entity.
     """
     operation, _, storage_path = self.__create_operation()
     partial_corr = CrossCorrelation(
         array_data=numpy.random.random((10, 10, 10, 10, 10)),
         use_storage=False)
     crossc = CrossCorrelation(source=time_series,
                               storage_path=storage_path,
                               time=range(10))
     crossc.write_data_slice(partial_corr)
     crossc.close_file()
     adapter_instance = StoreAdapter([crossc])
     OperationService().initiate_prelaunch(operation, adapter_instance, {})
     return crossc
Example #24
 def create_crosscoherence(self, time_series):
     """
     :return: a stored entity of type CoherenceSpectrum
     """
     operation, _, storage_path = self.__create_operation()
     partial_coh = CoherenceSpectrum(
         array_data=numpy.random.random((10, 10, 10, 10)),
         use_storage=False)
     coherence = CoherenceSpectrum(source=time_series,
                                   storage_path=storage_path,
                                   frequency=0.1,
                                   nfft=256)
     coherence.write_data_slice(partial_coh)
     coherence.close_file()
     adapter_instance = StoreAdapter([coherence])
     OperationService().initiate_prelaunch(operation, adapter_instance, {})
     return coherence
Example #25
 def test_remove_array_wrapper(self):
     """
     Tests the happy flow for the deletion of an array wrapper.
     """
     array_wrappers = self.flow_service.get_available_datatypes(
         self.test_project.id, "tvb.datatypes.arrays.MappedArray")
     self.assertEqual(len(array_wrappers), 1, "There should be exactly one array wrapper")
     data = {'param_1': 'some value'}
     OperationService().initiate_prelaunch(self.operation,
                                           self.adapter_instance, {},
                                           **data)
     array_wrappers = self.flow_service.get_available_datatypes(
         self.test_project.id, "tvb.datatypes.arrays.MappedArray")
     self.assertEqual(len(array_wrappers), 2, "There should be two array wrappers after launch")
     arraygid = array_wrappers[0][2]
     self.project_service.remove_datatype(self.test_project.id, arraygid)
     res = dao.get_datatype_by_gid(arraygid)
     self.assertEqual(None, res, "The array wrapper was not deleted.")
Example #26
 def test_noise_2d_bad_shape(self):
     """
     Test a simulation with noise. Pass a wrong shape and expect an exception to be raised.
     """
     SIMULATOR_PARAMETERS['integrator'] = u'HeunStochastic'
     noise_4d_config = [[1 for _ in xrange(self.CONNECTIVITY_NODES)] for _ in xrange(4)]
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_dt'] = u'0.01220703125'
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise'] = u'Additive'
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = str(noise_4d_config)
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_ntau'] = u'0.0'
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream'] = u'RandomStream'
     SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream_parameters_option_RandomStream_init_seed'] = u'42'
     filtered_params = self.simulator_adapter.prepare_ui_inputs(SIMULATOR_PARAMETERS)
     self.simulator_adapter.configure(**filtered_params)
     if hasattr(self.simulator_adapter, 'algorithm'):
         self.assertEqual((4, 74), self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
     else:
         self.fail("Simulator adapter was not initialized properly")
     self.assertRaises(Exception, OperationService().initiate_prelaunch, self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
Example #27
 def create_timeseries(self, connectivity, ts_type=None, sensors=None):
     """
     Create a stored TimeSeries entity.
     """
     operation, _, storage_path = self.__create_operation()
     if ts_type == "EEG":
         time_series = TimeSeriesEEG(storage_path=storage_path,
                                     sensors=sensors)
     else:
         time_series = TimeSeriesRegion(storage_path=storage_path,
                                        connectivity=connectivity)
     data = numpy.random.random((10, 10, 10, 10))
     time = numpy.arange(10)
     time_series.write_data_slice(data)
     time_series.write_time_slice(time)
     adapter_instance = StoreAdapter([time_series])
     OperationService().initiate_prelaunch(operation, adapter_instance, {})
     time_series = dao.get_datatype_by_gid(time_series.gid)
     return time_series
Example #28
 def create_surface(self):
     """
     Create a dummy surface entity.
     :return: (Algorithm Identifier, stored Surface entity)
     """
     operation, algo_id, storage_path = self.__create_operation()
     surface = CorticalSurface(storage_path=storage_path)
     surface.vertices = numpy.array(
         [[-10, 0, 0], [0, 0, -10], [10, 0, 0], [0, 10, 0]], dtype=float)
     surface.triangles = numpy.array(
         [[0, 1, 2], [0, 1, 3], [1, 2, 3], [0, 2, 3]], dtype=int)
     surface.number_of_triangles = 4
     surface.number_of_vertices = 4
     surface.triangle_normals = numpy.ones((4, 3))
     surface.vertex_normals = numpy.ones((4, 3))
     surface.zero_based_triangles = True
     adapter_instance = StoreAdapter([surface])
     OperationService().initiate_prelaunch(operation, adapter_instance, {})
     return algo_id, surface
Example #29
    def test_bct_all(self):
        """
        Iterate all BCT algorithms and execute them.
        """
        for i in xrange(len(self.bct_adapters)):
            for bct_identifier in self.bct_adapters[i].get_algorithms_dictionary():
                ### Prepare Operation and parameters
                algorithm = dao.get_algorithm_by_group(self.algo_groups[i].id,
                                                       bct_identifier)
                operation = TestFactory.create_operation(
                    algorithm=algorithm,
                    test_user=self.test_user,
                    test_project=self.test_project,
                    operation_status="STARTED")
                self.assertEqual("STARTED", operation.status)
                ### Launch BCT algorithm
                submit_data = {
                    self.algo_groups[i].algorithm_param_name: bct_identifier,
                    algorithm.parameter_name: self.connectivity.gid
                }
                try:
                    OperationService().initiate_prelaunch(
                        operation, self.bct_adapters[i], {}, **submit_data)
                    if bct_identifier in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                        raise Exception(
                            "Algorithm %s was expected to throw input validation "
                            "exception, but did not!" % (bct_identifier, ))

                    operation = dao.get_operation_by_id(operation.id)
                    ### Check that the operation finished successfully.
                    self.assertEqual("FINISHED", operation.status)
                    ### Make sure at least one result exists for each BCT algorithm
                    results = dao.get_generic_entity(model.DataType,
                                                     operation.id,
                                                     'fk_from_operation')
                    self.assertTrue(len(results) > 0)

                except InvalidParameterException, excep:
                    ## Some algorithms are expected to throw a validation exception.
                    if bct_identifier not in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                        raise excep
Example #30
 def prepare_group_launch(self, group_gid, step_key, adapter_key, **data):
     """
     Receives as input a group GID and an algorithm identified by category and id,
     along with data naming the algorithm's required input parameter.
     Using these, generate a range of GIDs for all the datatypes in the group,
     and launch a new operation group.
     """
     prj_service = ProjectService()
     dt_group = prj_service.get_datatypegroup_by_gid(group_gid)
     datatypes = prj_service.get_datatypes_from_datatype_group(dt_group.id)
     range_param_name = data['range_param_name']
     del data['range_param_name']
     data[PARAM_RANGE_1] = range_param_name
     data[range_param_name] = ','.join([dt.gid for dt in datatypes])
     OperationService().group_operation_launch(
         base.get_logged_user().id,
         base.get_current_project().id, int(adapter_key), int(step_key),
         **data)
     redirect_url = self._compute_back_link('operations',
                                            base.get_current_project())
     raise cherrypy.HTTPRedirect(redirect_url)
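A sketch of the data payload this method expects (every key except 'range_param_name' is illustrative):

    data = {'range_param_name': 'time_series',   # which input to range over
            'some_other_param': '0.5'}
    # After the rewriting above, data[PARAM_RANGE_1] == 'time_series' and
    # data['time_series'] holds a comma-separated list of DataType GIDs.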