Example 1
    def build(test_user=None,
              test_project=None,
              is_simulation=False,
              store_vm=False,
              operation_status=STATUS_FINISHED,
              range_values=None,
              conn_gid=None):
        """
        Create a persisted operation with its ViewModel stored.
        :return: Operation entity after persistence.
        """
        if test_user is None:
            test_user = user_factory()
        if test_project is None:
            test_project = project_factory(test_user)

        vm_gid = uuid.uuid4()
        view_model = None

        if is_simulation:
            algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE,
                                                    SIMULATOR_CLASS)
            if store_vm:
                adapter = ABCAdapter.build_adapter(algorithm)
                view_model = adapter.get_view_model_class()()
                view_model.connectivity = connectivity_factory(
                    4).gid if conn_gid is None else conn_gid
                vm_gid = view_model.gid

        else:
            algorithm = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                    TVB_IMPORTER_CLASS)
            if store_vm:
                adapter = ABCAdapter.build_adapter(algorithm)
                view_model = adapter.get_view_model_class()()
                view_model.data_file = "."
                vm_gid = view_model.gid

        operation = Operation(vm_gid.hex,
                              test_user.id,
                              test_project.id,
                              algorithm.id,
                              status=operation_status,
                              range_values=range_values)
        dao.store_entity(operation)

        if store_vm:
            op_folder = FilesHelper().get_project_folder(
                test_project, str(operation.id))
            h5.store_view_model(view_model, op_folder)

        # Make sure lazy attributes are correctly loaded.
        return dao.get_operation_by_id(operation.id)
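A minimal usage sketch for the factory above, assuming it is registered as a pytest fixture named operation_factory (the fixture name and its registration are assumptions, not shown in the snippet):

    # Hypothetical pytest usage of the build() factory above
    def test_simulation_operation(operation_factory):
        operation = operation_factory(is_simulation=True, store_vm=True)
        assert operation.status == STATUS_FINISHED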
Example 2
    def test_initiate_operation(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        test_adapter_factory()
        adapter = TestFactory.create_adapter(module, class_name)
        output = adapter.get_output()
        output_type = output[0].__name__
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        view_model = adapter.get_view_model()()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  tmp_folder, model_view=view_model)

        group = dao.get_algorithm_by_module(module, class_name)
        assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
        assert group.classname == 'TestAdapter1', "Wrong data stored."
        dts, count = dao.get_values_of_datatype(self.test_project.id, DummyDataTypeIndex)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        assert datatype.type == output_type, "Wrong data stored."
Example 4
    def __read_adapters(self, category_key, module_name):
        """
        Add or update lines into STORED_ADAPTERS table:
        One line for each custom class found which is extending from ABCAdapter.
        """
        for adapters_file in Introspector.__read_module_variable(module_name):
            try:
                adapters_module = __import__(module_name + "." + adapters_file, globals(), locals(), [adapters_file])
                for ad_class in dir(adapters_module):
                    ad_class = adapters_module.__dict__[ad_class]
                    if Introspector._is_concrete_subclass(ad_class, ABCAdapter):
                        if ad_class.can_be_active():
                            stored_adapter = model.Algorithm(ad_class.__module__, ad_class.__name__, category_key,
                                                                 ad_class.get_group_name(), ad_class.get_group_description(),
                                                                 ad_class.get_ui_name(), ad_class.get_ui_description(),
                                                                 ad_class.get_ui_subsection(), datetime.datetime.now())
                            adapter_inst = ad_class()
                            in_params = adapter_inst.get_input_tree()
                            req_type, param_name, flt = self.__get_required_input(in_params)
                            stored_adapter.required_datatype = req_type
                            stored_adapter.parameter_name = param_name
                            stored_adapter.datatype_filter = flt
                            stored_adapter.outputlist = str(adapter_inst.get_output())

                            inst_from_db = dao.get_algorithm_by_module(ad_class.__module__, ad_class.__name__)
                            if inst_from_db is not None:
                                stored_adapter.id = inst_from_db.id

                            stored_adapter = dao.store_entity(stored_adapter, inst_from_db is not None)
                            ad_class.stored_adapter = stored_adapter
                        else:
                            self.logger.warning("Skipped Adapter(probably because MATLAB not found):" + str(ad_class))

            except Exception:
                self.logger.exception("Could not introspect Adapters file:" + adapters_file)
Example 5
    def _export_linked_datatypes(self, project, zip_file):
        linked_paths = ProjectService().get_linked_datatypes_storage_path(
            project)

        if not linked_paths:
            # do not export an empty operation
            return

        # Make an import operation which will contain links to other projects
        algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                           TVB_IMPORTER_CLASS)
        op = model_operation.Operation(None, None, project.id, algo.id)
        op.project = project
        op.algorithm = algo
        op.id = 'links-to-external-projects'
        op.start_now()
        op.mark_complete(model_operation.STATUS_FINISHED)

        op_folder = self.files_helper.get_operation_folder(
            op.project.name, op.id)
        op_folder_name = os.path.basename(op_folder)

        # add linked datatypes to archive in the import operation
        for pth in linked_paths:
            zip_pth = op_folder_name + '/' + os.path.basename(pth)
            zip_file.write(pth, zip_pth)

        # remove these files, since we only want them in the export archive
        self.files_helper.remove_folder(op_folder)
Example 6
    def __init__(self):
        micro_postfix = "_%d" % int(time.time() * 1000000)

        # Create all structures needed later for datatype creation
        self.files_helper = FilesHelper()

        # First create user 
        user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                          "*****@*****.**" + micro_postfix, True, "user")
        self.user = dao.store_entity(user)

        # Now create a project
        project_service = ProjectService()
        data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
        self.project = project_service.store_project(self.user, True, None, **data)

        # Create algorithm
        alg_category = model.AlgorithmCategory('one', True)
        dao.store_entity(alg_category)
        ad = model.Algorithm(SIMULATOR_MODULE, SIMULATOR_CLASS, alg_category.id)
        self.algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        if self.algorithm is None:
            self.algorithm = dao.store_entity(ad)

        # Create an operation
        self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                     DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
        operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED)
        self.operation = dao.store_entity(operation)
Example 7
    def create_operation(algorithm=None, test_user=None, test_project=None, 
                         operation_status=model.STATUS_FINISHED, parameters="test params"):
        """
        Create persisted operation.
        
        :param algorithm: When None, the NDimensionArrayAdapter test algorithm is looked up and used.
        :return: Operation entity after persistence. 
        """
        if algorithm is None:
            algorithm = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter',
                                                    'NDimensionArrayAdapter')

        if test_user is None:
            test_user = TestFactory.create_user()
            
        if test_project is None:
            test_project = TestFactory.create_project(test_user)
            
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
                DataTypeMetaData.KEY_STATE: "RAW_DATA"}
        operation = model.Operation(test_user.id, test_project.id, algorithm.id, parameters, meta=json.dumps(meta),
                                    status=operation_status)
        dao.store_entity(operation)
        ### Make sure lazy attributes are correctly loaded.
        return dao.get_operation_by_id(operation.id)
Example 8
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     adapter = TestFactory.create_adapter(module, class_name)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     res = self.operation_service.initiate_operation(
         self.test_user, self.test_project.id, adapter, tmp_folder, **data)
     self.assertTrue(
         res.index("has finished.") > 10, "Operation didn't finish")
     group = dao.get_algorithm_by_module(module, class_name)
     self.assertEqual(group.module,
                      'tvb.tests.framework.adapters.testadapter1',
                      "Wrong data stored.")
     self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
     dts, count = dao.get_values_of_datatype(self.test_project.id,
                                             Datatype1)
     self.assertEqual(count, 1)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example 9
    def build(algorithm=None,
              test_user=None,
              test_project=None,
              operation_status=STATUS_FINISHED,
              parameters="test params"):
        """
        Create persisted operation.
        :param algorithm: When None, the SimulatorAdapter algorithm is used.
        :return: Operation entity after persistence.
        """
        if algorithm is None:
            algorithm = dao.get_algorithm_by_module(
                'tvb.adapters.simulator.simulator_adapter', 'SimulatorAdapter')
        if test_user is None:
            test_user = user_factory()
        if test_project is None:
            test_project = project_factory(test_user)

        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }
        operation = Operation(test_user.id,
                              test_project.id,
                              algorithm.id,
                              parameters,
                              meta=json.dumps(meta),
                              status=operation_status)
        dao.store_entity(operation)
        # Make sure lazy attributes are correctly loaded.
        return dao.get_operation_by_id(operation.id)
Example 10
    def build(algorithm=None,
              test_user=None,
              test_project=None,
              operation_status=STATUS_FINISHED,
              parameters="test params",
              range_values=None):
        """
        Create persisted operation.
        :param algorithm: When None, the SimulatorAdapter algorithm is used.
        :return: Operation entity after persistence.
        """
        if algorithm is None:
            algorithm = dao.get_algorithm_by_module(
                'tvb.adapters.simulator.simulator_adapter', 'SimulatorAdapter')
        if test_user is None:
            test_user = user_factory()
        if test_project is None:
            test_project = project_factory(test_user)

        operation = Operation(test_user.id,
                              test_project.id,
                              algorithm.id,
                              parameters,
                              status=operation_status,
                              range_values=range_values)
        dao.store_entity(operation)
        # Make sure lazy attributes are correctly loaded.
        return dao.get_operation_by_id(operation.id)
Example 11
    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of dummy `MappedArray`
        """
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        assert count == 0

        group = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter',
                                            'NDimensionArrayAdapter')
        adapter_instance = ABCAdapter.build_adapter(group)
        data = {'param_1': 'some value'}
        # create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        assert count == 1

        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        assert count == 2

        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
                                                                          "tvb.datatypes.arrays.MappedArray")
        assert count == 3

        return array_wrappers
Example 12
    def build(adapter_class=DummyAdapter1):

        all_categories = dao.get_algorithm_categories()
        algo_category_id = all_categories[0].id

        stored_adapter = Algorithm(adapter_class.__module__, adapter_class.__name__, algo_category_id,
                                   adapter_class.get_group_name(), adapter_class.get_group_description(),
                                   adapter_class.get_ui_name(), adapter_class.get_ui_description(),
                                   adapter_class.get_ui_subsection(), datetime.now())
        adapter_inst = adapter_class()

        adapter_form = adapter_inst.get_form()
        required_datatype = adapter_form.get_required_datatype()
        if required_datatype is not None:
            required_datatype = required_datatype.__name__
        filters = adapter_form.get_filters()
        if filters is not None:
            filters = filters.to_json()

        stored_adapter.required_datatype = required_datatype
        stored_adapter.datatype_filter = filters
        stored_adapter.parameter_name = adapter_form.get_input_name()
        stored_adapter.outputlist = str(adapter_inst.get_output())

        inst_from_db = dao.get_algorithm_by_module(adapter_class.__module__, adapter_class.__name__)
        if inst_from_db is not None:
            stored_adapter.id = inst_from_db.id

        return dao.store_entity(stored_adapter, inst_from_db is not None)
Example 13
    def _export_linked_datatypes(self, project, zip_file):
        files_helper = FilesHelper()
        linked_paths = self._get_linked_datatypes_storage_path(project)

        if not linked_paths:
            # do not export an empty operation
            return

        # Make an import operation which will contain links to other projects
        algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        op = model.Operation(None, project.id, algo.id, '')
        op.project = project
        op.algorithm = algo
        op.id = 'links-to-external-projects'
        op.start_now()
        op.mark_complete(model.STATUS_FINISHED)

        # write operation.xml to disk
        files_helper.write_operation_metadata(op)
        op_folder = files_helper.get_operation_folder(op.project.name, op.id)
        operation_xml = files_helper.get_operation_meta_file_path(op.project.name, op.id)
        op_folder_name = os.path.basename(op_folder)

        # add operation.xml
        zip_file.write(operation_xml, op_folder_name + '/' + os.path.basename(operation_xml))

        # add linked datatypes to archive in the import operation
        for pth in linked_paths:
            zip_pth = op_folder_name + '/' + os.path.basename(pth)
            zip_file.write(pth, zip_pth)

        # remove these files, since we only want them in the export archive
        files_helper.remove_folder(op_folder)
Example 14
    def _populate_algorithms(self, algo_category_class, algo_category_id):
        for adapter_class in self.introspection_registry.ADAPTERS[algo_category_class]:
            try:
                if not adapter_class.can_be_active():
                    self.logger.warning("Skipped Adapter(probably because MATLAB not found):" + str(adapter_class))
                    continue

                stored_adapter = Algorithm(adapter_class.__module__, adapter_class.__name__, algo_category_id,
                                           adapter_class.get_group_name(), adapter_class.get_group_description(),
                                           adapter_class.get_ui_name(), adapter_class.get_ui_description(),
                                           adapter_class.get_ui_subsection(), datetime.datetime.now())
                adapter_inst = adapter_class()

                adapter_form = adapter_inst.get_form()
                required_datatype = adapter_form.get_required_datatype()
                if required_datatype is not None:
                    required_datatype = required_datatype.__name__
                filters = adapter_form.get_filters()
                if filters is not None:
                    filters = filters.to_json()

                stored_adapter.required_datatype = required_datatype
                stored_adapter.datatype_filter = filters
                stored_adapter.parameter_name = adapter_form.get_input_name()
                stored_adapter.outputlist = str(adapter_inst.get_output())

                inst_from_db = dao.get_algorithm_by_module(adapter_class.__module__, adapter_class.__name__)
                if inst_from_db is not None:
                    stored_adapter.id = inst_from_db.id

                stored_adapter = dao.store_entity(stored_adapter, inst_from_db is not None)
                adapter_class.stored_adapter = stored_adapter

            except Exception:
                self.logger.exception("Could not introspect Adapters file:" + adapter_class.__module__)
Example 15
 def test_build_adapter_invalid(self):
     """
     Test flow for trying to build an adapter that does not inherit from ABCAdapter.
     """
     group = dao.get_algorithm_by_module(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_INVALID_CLASS)
     with pytest.raises(IntrospectionException):
         ABCAdapter.build_adapter(group)
Example 16
    def _prepare_metric_operation(self, sim_operation):
        # type: (Operation) -> Operation
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)
        datatype_index = h5.REGISTRY.get_index_for_datatype(TimeSeries)
        time_series_index = dao.get_generic_entity(datatype_index, sim_operation.id, 'fk_from_operation')[0]
        ga = self.prepare_metadata(metric_algo.algorithm_category, time_series_index.fk_parent_burst)
        ga.visible = False

        view_model = get_class_by_name("{}.{}".format(MEASURE_METRICS_MODULE, MEASURE_METRICS_MODEL_CLASS))()
        view_model.time_series = time_series_index.gid
        view_model.algorithms = tuple(ALGORITHMS.keys())
        view_model.generic_attributes = ga

        parent_burst = dao.get_generic_entity(BurstConfiguration, time_series_index.fk_parent_burst, 'gid')[0]
        metric_op_group = dao.get_operationgroup_by_id(parent_burst.fk_metric_operation_group)
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        range_values = sim_operation.range_values
        view_model.operation_group_gid = uuid.UUID(metric_op_group.gid)
        view_model.ranges = json.dumps(parent_burst.ranges)
        view_model.range_values = range_values
        view_model.is_metric_operation = True
        metric_operation = Operation(view_model.gid.hex, sim_operation.fk_launched_by, sim_operation.fk_launched_in,
                                     metric_algo.id, user_group=ga.operation_tag, op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        metric_operation = dao.store_entity(metric_operation)

        metrics_datatype_group = dao.get_generic_entity(DataTypeGroup, metric_operation_group_id,
                                                        'fk_operation_group')[0]
        if metrics_datatype_group.fk_from_operation is None:
            metrics_datatype_group.fk_from_operation = metric_operation.id
            dao.store_entity(metrics_datatype_group)

        self.store_view_model(metric_operation, sim_operation.project, view_model)
        return metric_operation
Example 17
    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of dummy `MappedArray`
        """
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 0)
        
        group = dao.get_algorithm_by_module('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
        adapter_instance = ABCAdapter.build_adapter(group)
        data = {'param_1': 'some value'}
        # create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 1)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 2)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
                                                                          "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(count, 3)

        return array_wrappers
Example 18
    def editstructure(self, project_id=None, first_level=None, second_level=None,
                      filter_input="", visibility_filter=None, **_ignored):
        """
        Return the page skeleton for displaying the project structure.
        """
        try:
            int(project_id)
        except (ValueError, TypeError):
            raise cherrypy.HTTPRedirect('/project')

        if first_level is None or second_level is None:
            first_level, second_level = self.get_project_structure_grouping()

        selected_project = self.project_service.find_project(project_id)
        self._mark_selected(selected_project)
        data = self.project_service.get_filterable_meta()
        filters = StaticFiltersFactory.build_datatype_filters(selected=visibility_filter)
        tumor_creator_algorithm = dao.get_algorithm_by_module(TumorDatasetCreator.__module__,
                                                              TumorDatasetCreator.__name__)

        template_specification = dict(mainContent="project/structure",
                                      title=selected_project.name,
                                      project=selected_project, data=data,
                                      firstLevelSelection=first_level, secondLevelSelection=second_level,
                                      filterInputValue=filter_input, filters=filters,
                                      tumorCreatorAlgorithmId=tumor_creator_algorithm.id)
        return self.fill_default_attributes(template_specification, 'data')
Example 19
 def create_adapter(module='tvb.tests.framework.adapters.ndimensionarrayadapter',
                    class_name='NDimensionArrayAdapter'):
     """
     :returns: Adapter Class after initialization.
     """
     algorithm = dao.get_algorithm_by_module(module, class_name)
     return ABCAdapter.build_adapter(algorithm)
Example 20
    def create_operation(test_user=None,
                         test_project=None,
                         operation_status=STATUS_FINISHED):
        """
        Create persisted operation.
        :return: Operation entity after persistence.
        """
        if test_user is None:
            test_user = TestFactory.create_user()
        if test_project is None:
            test_project = TestFactory.create_project(test_user)

        algorithm = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                TVB_IMPORTER_CLASS)
        adapter = ABCAdapter.build_adapter(algorithm)
        view_model = adapter.get_view_model_class()()
        view_model.data_file = "."
        operation = Operation(view_model.gid.hex,
                              test_user.id,
                              test_project.id,
                              algorithm.id,
                              status=operation_status)
        dao.store_entity(operation)
        op_dir = StorageInterface().get_project_folder(test_project.name,
                                                       str(operation.id))
        h5.store_view_model(view_model, op_dir)
        return dao.get_operation_by_id(operation.id)
Example 21
    def transactional_setup_method(self):
        """
        Reset the database before each test.
        """
        initialize_storage()

        algorithm = dao.get_algorithm_by_module(IntrospectionRegistry.SIMULATOR_MODULE,
                                                IntrospectionRegistry.SIMULATOR_CLASS)
        self.simulator_adapter = ABCAdapter.build_adapter(algorithm)
Example 22
 def test_prepare_adapter(self):
     """
     Test preparation of an adapter.
     """
     stored_adapter = dao.get_algorithm_by_module(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
     assert isinstance(stored_adapter, model_operation.Algorithm), "Something went wrong with valid data!"
     adapter = self.flow_service.prepare_adapter(stored_adapter)
     assert isinstance(adapter, TestAdapter1), "Adapter incorrectly built"
     assert adapter.get_form_class() == TestAdapter1Form
     assert adapter.get_view_model() == TestModel
Example 23
 def get_algorithm(self):
     """
     Return the algorithm saved in this entity.
     Should be used only if previously an algorithm was saved on this entity by
     calling 'set_algorithm' method.
     """
     if self.ALGO_INFO not in self._data_dict:
         return None
     return dao.get_algorithm_by_module(self._data_dict[self.ALGO_INFO]['module'],
                                        self._data_dict[self.ALGO_INFO]['class'])
Example 24
def import_conn_zip(project_id, zip_path):
    project = dao.get_project_by_id(project_id)
    group = dao.get_algorithm_by_module(
        'tvb.adapters.uploaders.zip_connectivity_importer',
        'ZIPConnectivityImporter')
    importer = ABCAdapter.build_adapter(group)
    ### Launch Operation
    FlowService().fire_operation(importer,
                                 project.administrator,
                                 project_id,
                                 uploaded=zip_path)
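A possible invocation of the helper above; the project id and archive path are placeholders, not values from the original snippet:

    # Project 1 must already exist, and the path must point to a TVB
    # connectivity ZIP archive on disk.
    import_conn_zip(1, "/tmp/connectivity_76.zip")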
Example 25
    def transactional_setup_method(self):
        """
        Reset the database before each test.
        """
        initialize_storage()

        algorithm = dao.get_algorithm_by_module(IntrospectionRegistry.SIMULATOR_MODULE,
                                                IntrospectionRegistry.SIMULATOR_CLASS)
        self.simulator_adapter = ABCAdapter.build_adapter(algorithm)
        self.test_user = TestFactory.create_user("Simulator_Adapter_User")
        self.test_project = TestFactory.create_project(self.test_user, "Simulator_Adapter_Project")
Example 26
 def get_algorithm(self):
     """
     Return the algorithm saved in this entity.
     Should be used only if previously an algorithm was saved on this entity by
     calling 'set_algorithm' method.
     """
     if self.ALGO_INFO not in self._data_dict:
         return None
     return dao.get_algorithm_by_module(
         self._data_dict[self.ALGO_INFO]['module'],
         self._data_dict[self.ALGO_INFO]['class'])
Example 27
 def _build_adapter_from_declaration(adapter_declaration):
     """
     Build an adapter from the declaration in the portlets xml.
     """
     adapter_import_path = adapter_declaration[ATT_TYPE]
     class_name = adapter_import_path.split('.')[-1]
     module_name = adapter_import_path.replace('.' + class_name, '')
     algo = dao.get_algorithm_by_module(module_name, class_name)
     if algo is not None:
         return ABCAdapter.build_adapter(algo)
     else:
         return None
Example 28
 def build_adapter_from_declaration(cls, adapter_declaration):
     """
     Build an adapter from the declaration in the portlets xml.
     """
     adapter_import_path = adapter_declaration[ABCAdapter.KEY_TYPE]
     class_name = adapter_import_path.split('.')[-1]
     module = adapter_import_path.replace('.' + class_name, '')
     algo = dao.get_algorithm_by_module(module, class_name)
     if algo is not None:
         return ABCAdapter.build_adapter(algo)
     else:
         return None
Example 29
 def ensure_db(self):
     """
     Ensure algorithm exists in DB and add it if not
     """
     cat = dao.get_uploader_categories()[0]
     cls = self.__class__
     cmd, cnm = cls.__module__, cls.__name__
     gp = dao.get_algorithm_by_module(cmd, cnm)
     if gp is None:
         gp = model.Algorithm(cmd, cnm, cat.id)
         gp = dao.store_entity(gp)
     self.stored_adapter = gp
Example 30
 def test_prepare_adapter(self):
     """
     Test preparation of an adapter.
     """
     stored_adapter = dao.get_algorithm_by_module(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
     interface = self.flow_service.prepare_adapter(self.test_project.id, stored_adapter)
     assert isinstance(stored_adapter, model.Algorithm), "Something went wrong with valid data!"
     assert "name" in interface[0], "Bad interface created!"
     assert interface[0]["name"] == "test", "Bad interface!"
     assert "type" in interface[0], "Bad interface created!"
     assert interface[0]["type"] == "int", "Bad interface!"
     assert "default" in interface[0], "Bad interface created!"
     assert interface[0]["default"] == "0", "Bad interface!"
Example 31
 def test_prepare_adapter(self):
     """
     Test preparation of an adapter.
     """
     stored_adapter = dao.get_algorithm_by_module(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
     interface = self.flow_service.prepare_adapter(self.test_project.id, stored_adapter)
     self.assertTrue(isinstance(stored_adapter, model.Algorithm), "Something went wrong with valid data!")
     self.assertTrue("name" in interface[0], "Bad interface created!")
     self.assertEquals(interface[0]["name"], "test", "Bad interface!")
     self.assertTrue("type" in interface[0], "Bad interface created!")
     self.assertEquals(interface[0]["type"], "int", "Bad interface!")
     self.assertTrue("default" in interface[0], "Bad interface created!")
     self.assertEquals(interface[0]["default"], "0", "Bad interface!")
Example 32
    def test_get_inputs_for_operation(self):
        """
        Tests method get_datatype_and_datatypegroup_inputs_for_operation.
        Verifies filters' influence over results is as expected
        """
        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        ids = []
        for datatype in array_wrappers:
            ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        parameters = json.dumps({"param_5": "1", "param_1": array_wrappers[0][2],
                                 "param_2": array_wrappers[1][2], "param_3": array_wrappers[2][2], "param_6": "0"})
        operation = model.Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertFalse(ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of operations.")
        self.assertTrue(ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")

        project, dt_group_id, first_dt, _ = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        parameters = json.dumps({"other_param": "_", "param_1": first_dt.gid})
        operation = model.Operation(self.test_user.id, project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        self.assertEqual(len(inputs), 0, "Incorrect number of dataTypes.")
        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertEqual(inputs[0].id, dt_group_id, "Wrong dataType.")
        self.assertTrue(inputs[0].id != first_dt.id, "Wrong dataType.")
Example 33
    def prepare_metrics_operation(operation):
        # TODO reuse from OperationService and do not duplicate logic here
        parent_burst = dao.get_generic_entity(BurstConfiguration, operation.fk_operation_group, 'fk_operation_group')[0]
        metric_operation_group_id = parent_burst.fk_metric_operation_group
        range_values = operation.range_values
        metric_algo = dao.get_algorithm_by_module(MEASURE_METRICS_MODULE, MEASURE_METRICS_CLASS)

        metric_operation = Operation(None, operation.fk_launched_by, operation.fk_launched_in, metric_algo.id,
                                     status=STATUS_FINISHED, op_group_id=metric_operation_group_id,
                                     range_values=range_values)
        metric_operation.visible = False
        metric_operation = dao.store_entity(metric_operation)
        op_dir = StorageInterface().get_project_folder(operation.project.name, str(metric_operation.id))
        return op_dir, metric_operation
Example 34
    def test_get_inputs_for_operation(self):
        """
        Tests method get_datatype_and_datatypegroup_inputs_for_operation.
        Verifies filters' influence over results is as expected
        """
        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        ids = []
        for datatype in array_wrappers:
            ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        parameters = json.dumps({"param_5": "1", "param_1": array_wrappers[0][2],
                                 "param_2": array_wrappers[1][2], "param_3": array_wrappers[2][2], "param_6": "0"})
        operation = Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        assert len(inputs) == 2
        assert ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."
        assert ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."
        assert not ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType."

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        assert len(inputs) == 3, "Incorrect number of operations."
        assert ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType."
        assert ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType."
        assert ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType."

        project, dt_group_id, first_dt, _ = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        parameters = json.dumps({"other_param": "_", "param_1": first_dt.gid})
        operation = Operation(self.test_user.id, project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.relevant_filter)
        assert len(inputs) == 0, "Incorrect number of dataTypes."
        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                          self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert inputs[0].id == dt_group_id, "Wrong dataType."
        assert inputs[0].id != first_dt.id, "Wrong dataType."
Example 35
    def build_adapter_from_class(adapter_class):
        """
        Having a subclass of ABCAdapter, prepare an instance for launching an operation with it.
        """
        if not issubclass(adapter_class, ABCAdapter):
            raise IntrospectionException("Invalid data type: It should extend adapters.ABCAdapter!")
        try:
            stored_adapter = dao.get_algorithm_by_module(adapter_class.__module__, adapter_class.__name__)

            adapter_instance = adapter_class()
            adapter_instance.stored_adapter = stored_adapter
            return adapter_instance
        except Exception as excep:
            LOGGER.exception(excep)
            raise IntrospectionException(str(excep))
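For instance, assuming this staticmethod lives on ABCAdapter (as the dao-based lookup suggests) and using the SimulatorAdapter import path that appears in other examples on this page:

    # Sketch only: build an adapter instance directly from its class
    from tvb.adapters.simulator.simulator_adapter import SimulatorAdapter

    adapter = ABCAdapter.build_adapter_from_class(SimulatorAdapter)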
Example 37
    def setUp(self):
        """
        Reset the database before each test.
        """
        initialize_storage()
        self.datatypes_factory = DatatypesFactory()
        self.test_user = self.datatypes_factory.get_user()
        self.test_project = self.datatypes_factory.get_project()
        self.connectivity = self.datatypes_factory.create_connectivity(self.CONNECTIVITY_NODES)[1]

        algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
        self.simulator_adapter = ABCAdapter.build_adapter(algorithm)
        self.operation = TestFactory.create_operation(algorithm, self.test_user, self.test_project,
                                                      model.STATUS_STARTED, json.dumps(SIMULATOR_PARAMETERS))

        SIMULATOR_PARAMETERS['connectivity'] = self.connectivity.gid
Example 39
    def _export_linked_datatypes(self, project):
        linked_paths = ProjectService().get_linked_datatypes_storage_path(project)

        if not linked_paths:
            # do not export an empty operation
            return None, None

        # Make an import operation which will contain links to other projects
        algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        op = model_operation.Operation(None, None, project.id, algo.id)
        op.project = project
        op.algorithm = algo
        op.id = 'links-to-external-projects'
        op.start_now()
        op.mark_complete(model_operation.STATUS_FINISHED)

        return linked_paths, op
Example 40
    def test_get_inputs_for_op_group(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        second_dt.visible = False
        dao.store_entity(second_dt)

        op_group = OperationGroup(project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
        params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})

        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        op1 = Operation(self.test_user.id, project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = Operation(self.test_user.id, project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        assert len(inputs) == 0

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert not first_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert not second_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert dt_group_id == inputs[0].id, "Retrieved wrong dataType."

        first_dt.visible = True
        dao.store_entity(first_dt)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert not first_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert not second_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert dt_group_id == inputs[0].id, "Retrieved wrong dataType."

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of dataTypes."
        assert not first_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert not second_dt.id == inputs[0].id, "Retrieved wrong dataType."
        assert dt_group_id == inputs[0].id, "Retrieved wrong dataType."
Example 41
    def test_get_inputs_for_op_group(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        second_dt.visible = False
        dao.store_entity(second_dt)

        op_group = model.OperationGroup(project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
        params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})

        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        op1 = model.Operation(self.test_user.id, project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 0)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        first_dt.visible = True
        dao.store_entity(first_dt)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")
Example 42
 def create_workflow_step(module, classname, static_kwargs=None, dynamic_kwargs=None,
                          step_index=0, base_step=0, tab_index=0, index_in_tab=0, is_view_step=False):
     """
     Build non-persisted WorkflowStep entity.
     """
     if static_kwargs is None:
         static_kwargs = {}
     if dynamic_kwargs is None:
         dynamic_kwargs = {}
     algorithm = dao.get_algorithm_by_module(module, classname)
     second_step_configuration = wf_cfg(algorithm.id, static_kwargs, dynamic_kwargs)
     
     static_params = second_step_configuration.static_params
     dynamic_params = second_step_configuration.dynamic_params
     for entry in dynamic_params:
         dynamic_params[entry][wf_cfg.STEP_INDEX_KEY] += base_step
      
     if is_view_step:
         return model.WorkflowStepView(algorithm_id=algorithm.id, tab_index=tab_index, index_in_tab=index_in_tab,
                                       static_param=static_params, dynamic_param=dynamic_params)
     return model.WorkflowStep(algorithm_id=algorithm.id, step_index=step_index, tab_index=tab_index,
                               index_in_tab=index_in_tab, static_param=static_params, dynamic_param=dynamic_params)
Example 43
    def test_get_inputs_for_op_group_simple_inputs(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The dataType inputs will not be part of a dataType group.
        """
        # it's a list of 3 elements
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        array_wrapper_ids = []
        for datatype in array_wrappers:
            array_wrapper_ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(array_wrapper_ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        op_group = model.OperationGroup(self.test_project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "2", "param_1": array_wrappers[0][2],
                               "param_2": array_wrappers[1][2], "param_6": "7"})
        params_2 = json.dumps({"param_5": "5", "param_3": array_wrappers[2][2],
                               "param_2": array_wrappers[1][2], "param_6": "6"})

        algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

        op1 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_1, op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_2, op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertFalse(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id])
Example 44
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     adapter = TestFactory.create_adapter(module, class_name)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     res = self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                     tmp_folder, **data)
     assert res.index("has finished.") > 10, "Operation didn't finish"
     group = dao.get_algorithm_by_module(module, class_name)
     assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
     assert group.classname == 'TestAdapter1', "Wrong data stored."
     dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
     assert count == 1
     assert len(dts) == 1
     datatype = dao.get_datatype_by_id(dts[0][0])
     assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
     assert datatype.type == output_type, "Wrong data stored."
Example 45
## Before starting this, we need to have the TVB web interface launched at least once (to have a default project, user, etc.)
if __name__ == "__main__":

    flow_service = FlowService()
    operation_service = OperationService()

    ## This ID of a project needs to exist in DB, and it can be taken from the WebInterface:
    project = dao.get_project_by_id(1)

    ## This is our new added Importer:
    adapter_instance = FooDataImporter()
    ## We need to store a reference to the new algorithm in DB as well:
    # First select the category of uploaders:
    upload_category = dao.get_uploader_categories()[0]
    # check if the algorithm has been added in DB already
    algorithm = dao.get_algorithm_by_module(FooDataImporter.__module__, FooDataImporter.__name__)
    if algorithm is None:
        # not stored in DB previously, we will store it now:
        algorithm = Algorithm(FooDataImporter.__module__, FooDataImporter.__name__, upload_category.id)
        algorithm = dao.store_entity(algorithm)

    adapter_instance.stored_adapter = algorithm

    ## Prepare the input arguments as if they were coming from a web UI submit:
    #launch_args = {"array_data": "[1, 2, 3, 4, 5]"}
    launch_args = {"array_data": "demo_array.txt"}

    ## launch an operation and have the results stored both in DB and on disk
    launched_operations = flow_service.fire_operation(adapter_instance,
                                                      project.administrator,
                                                      project.id,
                                                      **launch_args)
Example 46
 def test_build_adapter_invalid(self):
     """
     Test flow for trying to build an adapter that does not inherit from ABCAdapter.
     """
     group = dao.get_algorithm_by_module(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_INVALID_CLASS)
     self.assertRaises(IntrospectionException, ABCAdapter.build_adapter, group)
Example 47
 def test_get_simple_adapter_interface(self):
     adapter = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
     result = self.flow_c.get_simple_adapter_interface(adapter.id)
     expected_interface = TestAdapter1().get_input_tree()
     self.assertEqual(result['inputList'], expected_interface)
Example 48
def import_conn_zip(project_id, zip_path):
    project = dao.get_project_by_id(project_id)
    group = dao.get_algorithm_by_module('tvb.adapters.uploaders.zip_connectivity_importer', 'ZIPConnectivityImporter')
    importer = ABCAdapter.build_adapter(group)
    ### Launch Operation
    FlowService().fire_operation(importer, project.administrator, project_id, uploaded=zip_path)
Example 49
 def get_algorithm_by_module_and_class(module, classname):
     """
     Get the db entry from the algorithm table for the given module and 
     class.
     """
     return dao.get_algorithm_by_module(module, classname)
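A short sketch of calling this wrapper with the simulator identifiers used in earlier examples (treating the helper as a plain function; in its original context it may be a service method):

    algo = get_algorithm_by_module_and_class('tvb.adapters.simulator.simulator_adapter',
                                             'SimulatorAdapter')
    if algo is not None:
        adapter = ABCAdapter.build_adapter(algo)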