    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {DataTypeMetaData.KEY_SUBJECT: "",
                              DataTypeMetaData.KEY_STATE: "RAW"}

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           expected_data.module + "." + expected_data.type)
        self.assertEqual(1, len(data_types), "Project should contain only one data type: Sensors.")

        sensors = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(sensors is not None, "Sensors instance should not be None")

        return sensors
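
A minimal usage sketch for the helper above; the file name, the sensors-type string and
the SensorsEEG datatype are illustrative assumptions, not taken from the original test:

    def test_eeg_sensors_import(self):
        """Hypothetical test built on top of _import()."""
        # assumed sample file shipped alongside the test data
        file_path = os.path.join(os.path.dirname(dataset.__file__), 'eeg_sensors_62.txt')
        sensors = self._import(file_path, 'EEG Sensors', SensorsEEG())
        self.assertTrue(sensors.number_of_sensors > 0, "Imported sensors should not be empty")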
Example #2
 def test_launch_operation_HDD_with_space(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb_test.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test": 100}
     TVBSettings.MAX_DISK_SPACE = float(
         adapter.get_required_disk_size(**data))
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.operation_service.initiate_operation(
         self.test_user,
         self.test_project.id,
         adapter,
         tmp_folder,
         method_name=ABCAdapter.LAUNCH_METHOD,
         **data)
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example #3
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb_test.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     res = self.operation_service.initiate_operation(
         self.test_user,
         self.test_project.id,
         adapter,
         tmp_folder,
         method_name=ABCAdapter.LAUNCH_METHOD,
         **data)
     self.assertTrue(
         res.index("has finished.") > 10, "Operation didn't finish")
     group = dao.find_group(module, class_name)
     self.assertEqual(group.module, 'tvb_test.adapters.testadapter1',
                      "Wrong data stored.")
     self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype1)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example #4
    def setUp(self):
        """
        Reset the database before each test.
        """
        #        self.reset_database()
        self.flow_service = FlowService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(admin=self.test_user)
        ### Insert some starting data in the database.
        categ1 = model.AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        categ2 = model.AlgorithmCategory('two', rawinput=True)
        self.categ2 = dao.store_entity(categ2)

        algo = model.AlgorithmGroup("test_module1", "classname1", categ1.id)
        self.algo1 = dao.store_entity(algo)
        algo = model.AlgorithmGroup("test_module2", "classname2", categ2.id)
        dao.store_entity(algo)
        algo = model.AlgorithmGroup("tvb_test.core.services.flowservice_test", "ValidTestAdapter", categ2.id)
        adapter = dao.store_entity(algo)

        algo = model.Algorithm(adapter.id, 'ident', name='', req_data='', param_name='', output='')
        self.algo_inst = dao.store_entity(algo)
        algo = model.AlgorithmGroup("test_module3", "classname3", categ1.id)
        dao.store_entity(algo)
        algo = model.Algorithm(self.algo1.id, 'id', name='', req_data='', param_name='', output='')
        self.algo_inst = dao.store_entity(algo)
Example #5
 def test_launch_operation_HDD_full_space_started_ops(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     space_taken_by_started = 100
     module = "tvb_test.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     started_operation = model.Operation(
         self.test_user.id,
         self.test_project.id,
         group.id,
         "",
         status=model.STATUS_STARTED,
         result_disk_size=space_taken_by_started)
     dao.store_entity(started_operation)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 100}
     TVBSettings.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.assertRaises(NoMemoryAvailableException,
                       self.operation_service.initiate_operation,
                       self.test_user,
                       self.test_project.id,
                       adapter,
                       tmp_folder,
                       method_name=ABCAdapter.LAUNCH_METHOD,
                       **data)
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
     self.assertEqual(len(dts), 0)
Example #6
    def _import(self, import_file_path=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.nifti_importer',
                               'NIFTIImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: "",
            DataTypeMetaData.KEY_STATE: "RAW"
        }

        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        time_series = TimeSeries()
        data_types = FlowService().get_available_datatypes(
            self.test_project.id, time_series.module + "." + time_series.type)
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None,
                        "TimeSeries should not be none")

        return time_series
Example #7
    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations and give available space for user so that the first should finish,
        but after the update to the user hdd size the second should not.
        """
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
        # Now update the maximum disk size to be less than the size of the previously
        # stored datatype plus what is estimated to be required by the next launch,
        # so the second initiate_operation call must fail.
        TVBSettings.MAX_DISK_SPACE = float(datatype.disk_size - 1) + float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter,
                          tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
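
A worked illustration of the disk budget set above; the numbers are hypothetical, and the
test only relies on the budget ending up just below what the second launch needs:

        # stored result on disk        : datatype.disk_size                  -> e.g. 100
        # estimate for the next launch : adapter.get_required_disk_size(...) -> e.g. 100
        # budget = (100 - 1) + (100 - 1) = 198 < 200, so the second
        # initiate_operation call raises NoMemoryAvailableException.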
Example #8
    def __init__(self):
        self.logger = get_logger(self.__class__.__module__)
        self.version_info = None

        self.user_service = UserService()
        self.flow_service = FlowService()

        analyze_category = self.flow_service.get_launchable_non_viewers()
        self.analyze_category_link = '/flow/step/' + str(analyze_category.id)
        self.analyze_adapters = None

        self.connectivity_tab_link = '/flow/step_connectivity'
        view_category = self.flow_service.get_visualisers_category()
        conn_id = self.flow_service.get_algorithm_by_module_and_class(
            CONNECTIVITY_MODULE, CONNECTIVITY_CLASS)[1].id
        connectivity_link = self.get_url_adapter(view_category.id, conn_id)

        local_connectivity_link = '/spatial/localconnectivity/step_1/1'

        connectivity_submenu = [
            dict(title="Large Scale Connectivity",
                 subsection="connectivity",
                 description="View Connectivity Regions. Perform Connectivity lesions",
                 link=connectivity_link),
            dict(title="Local Connectivity",
                 subsection="local",
                 link=local_connectivity_link,
                 description="Create or view existent Local Connectivity entities.")
        ]
        self.connectivity_submenu = connectivity_submenu
Example #9
 def __init__(self):
     base.BaseController.__init__(self)
     self.flow_service = FlowService()
     self.logger = get_logger(__name__)
     editable_entities = [dict(link='/spatial/stimulus/region/step_1_submit/1/1', title='Region Stimulus',
                               subsection='regionstim', description='Create a new Stimulus on Region level'),
                          dict(link='/spatial/stimulus/surface/step_1_submit/1/1', title='Surface Stimulus',
                               subsection='surfacestim', description='Create a new Stimulus on Surface level')]
     self.submenu_list = editable_entities
Example #10
 def setUp(self):
     #        self.clean_database()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.old_config_file = cfg.CURRENT_DIR
     cfg.CURRENT_DIR = os.path.dirname(tvb_test.__file__)
     self.workflow_service = WorkflowService()
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.flow_service = FlowService()
Example #11
    def test_adapter_launch(self):
        """
        Test that the adapter launches and successfully generates a datatype measure entry.
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW"
        }
        algo_group = FlowService().get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)[1]
        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         algo_group.id,
                                         json.dumps(''),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED,
                                         method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))
        dummy_input = numpy.arange(1, 10001).reshape(10, 10, 10, 10)
        dummy_time = numpy.arange(1, 11)

        # Get connectivity
        connectivities = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.connectivity.Connectivity")
        self.assertEqual(len(connectivities), 1)
        connectivity_gid = connectivities[0][2]

        dummy_time_series = TimeSeriesRegion()
        dummy_time_series.storage_path = storage_path
        dummy_time_series.write_data_slice(dummy_input)
        dummy_time_series.write_time_slice(dummy_time)
        dummy_time_series.close_file()
        dummy_time_series.start_time = 0.0
        dummy_time_series.sample_period = 1.0
        dummy_time_series.connectivity = connectivity_gid

        adapter_instance = StoreAdapter([dummy_time_series])
        OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                              {})

        dummy_time_series = dao.get_generic_entity(dummy_time_series.__class__,
                                                   dummy_time_series.gid,
                                                   'gid')[0]
        ts_metric_adapter = TimeseriesMetricsAdapter()
        resulted_metric = ts_metric_adapter.launch(dummy_time_series)
        self.assertTrue(isinstance(resulted_metric, DatatypeMeasure),
                        "Result should be a datatype measure.")
        self.assertTrue(
            len(resulted_metric.metrics) == len(
                ts_metric_adapter.available_algorithms.keys()),
            "A result should have been generated for every metric.")
Example #12
    def test_delete_dt_free_HDD_space(self):
        """
        Launch two operations and give enough available space for user so that both should finish.
        """
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 0)
        self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)

        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

        # Now remove the stored datatype, freeing its disk space, so that a second
        # launch fits within the same MAX_DISK_SPACE budget.
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 0)

        self.operation_service.initiate_operation(
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            method_name=ABCAdapter.LAUNCH_METHOD,
            **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                         "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example #13
    def setUp(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.FULL_VIEW)
Example #14
    def test_happy_flow_import(self):
        """
        Test that importing a connectivity ZIP archive generates exactly one new DataType in DB.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project,
                                                       Connectivity())
        group = dao.find_group(
            'tvb.adapters.uploaders.zip_connectivity_importer',
            'ZIPConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
            DataTypeMetaData.KEY_STATE: "RAW"
        }
        zip_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)),
            'connectivity_regions_96.zip')
        args = {'uploaded': zip_path}

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)
        dt_count_after = TestFactory.get_entity_count(self.test_project,
                                                      Connectivity())
        self.assertTrue(dt_count_after == dt_count_before + 1)
Example #15
 def test_happy_flow_region_import(self):
     dt_count_before = TestFactory.get_entity_count(self.test_project,
                                                    ProjectionRegionEEG())
     group = dao.find_group(
         'tvb.adapters.uploaders.projection_matrix_importer',
         'ProjectionMatrixRegionEEGImporter')
     importer = ABCAdapter.build_adapter(group)
     importer.meta_data = {
         DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
         DataTypeMetaData.KEY_STATE: "RAW"
     }
     zip_path = os.path.join(
         os.path.abspath(os.path.dirname(dataset.__file__)),
         'region_conn_74_eeg_1020_62.mat')
     args = {
         'projection_file': zip_path,
         'dataset_name': 'ProjectionMatrix',
         'connectivity': self.connectivity.gid,
         'sensors': self.sensors.gid
     }
     FlowService().fire_operation(importer, self.test_user,
                                  self.test_project.id, **args)
     dt_count_after = TestFactory.get_entity_count(self.test_project,
                                                   ProjectionRegionEEG())
     self.assertTrue(dt_count_after == dt_count_before + 1)
Example #16
 def get_entity_count(project, datatype):
     """
     Return the count of stored datatypes whose class matches that of the given datatype instance.
     """
     data_types = FlowService().get_available_datatypes(
         project.id, datatype.module + "." + datatype.type)
     return len(data_types)
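
A short usage sketch; Connectivity is an illustrative choice of datatype, and the importer
step in the middle is elided:

     count_before = TestFactory.get_entity_count(test_project, Connectivity())
     # ... fire an importer operation that stores exactly one Connectivity ...
     count_after = TestFactory.get_entity_count(test_project, Connectivity())
     assert count_after == count_before + 1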
Example #17
    def import_cff(cff_path=None, test_user=None, test_project=None):
        """
        This method is used for importing a CFF data-set (load CFF_Importer, launch it).
        :param cff_path: absolute path where CFF file exists. When None, a default CFF will be used.
        :param test_user: optional persisted User instance, to use as the Operation's launcher
        :param test_project: optional persisted Project instance, in which to launch the Operation.
        """
        ### Prepare Data
        if cff_path is None:
            cff_path = os.path.join(os.path.dirname(cff_dataset.__file__),
                                    'dataset_74.cff')
        if test_user is None:
            test_user = TestFactory.create_user()
        if test_project is None:
            test_project = TestFactory.create_project(test_user)

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.cff_importer',
                               'CFF_Importer')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
            DataTypeMetaData.KEY_STATE: "RAW"
        }
        args = {'cff': cff_path}

        ### Launch Operation
        FlowService().fire_operation(importer, test_user, test_project.id,
                                     **args)
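
Hypothetical calls to the factory method above: with no arguments it creates a fresh user
and project and imports the bundled dataset_74.cff; the setUp methods in these examples
instead pass an existing user and project:

        TestFactory.import_cff()
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)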
Example #18
 def setUp(self):
     """
     Sets up the testing environment;
     saves config file;
     creates a test user, a test project;
     creates burst, operation, flow and workflow services
     """
     #        self.clean_database()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.old_config_file = cfg.CURRENT_DIR
     cfg.CURRENT_DIR = os.path.dirname(tvb_test.__file__)
     self.workflow_service = WorkflowService()
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.flow_service = FlowService()
Example #19
 def _burst_create_connectivity(self):
     """
     Create a connectivity that will be used in "non-dummy" burst launches (with the actual simulator).
     TODO: This is duplicate code from burstservice_test. Should go into the 'generic' DataType factory
     once that is done.
     """
     meta = {
         DataTypeMetaData.KEY_SUBJECT: "John Doe",
         DataTypeMetaData.KEY_STATE: "RAW"
     }
     algorithm, algo_group = FlowService().get_algorithm_by_module_and_class(
         SIMULATOR_MODULE, SIMULATOR_CLASS)
     self.operation = model.Operation(self.test_user.id,
                                      self.test_project.id,
                                      algo_group.id,
                                      json.dumps(''),
                                      meta=json.dumps(meta),
                                      status="STARTED",
                                      method_name=ABCAdapter.LAUNCH_METHOD)
     self.operation = dao.store_entity(self.operation)
     storage_path = FilesHelper().get_project_folder(
         self.test_project, str(self.operation.id))
     connectivity = Connectivity(storage_path=storage_path)
     connectivity.weights = numpy.ones((74, 74))
     connectivity.centres = numpy.ones((74, 3))
     adapter_instance = StoreAdapter([connectivity])
     OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                           {})
     return algorithm.id, connectivity
Example #20
 def get_entity(project, expected_data, filters=None):
     """
     Return the first stored entity whose class matches that of expected_data.
     """
     data_types = FlowService().get_available_datatypes(
         project.id, expected_data.module + "." + expected_data.type,
         filters)
     entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
     return entity
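
A usage sketch; the filters argument is optional and Connectivity is illustrative:

     connectivity = TestFactory.get_entity(test_project, Connectivity())
     assert connectivity is not None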
Example #21
 def test_zip_import(self):
     """
         This method tests import of TVB data in zip format (which imply multiple data types
         in the same zip file - exported from a group)
     """
     self._import(self.zip_file_path)
     data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                        self.datatype.module + "." + self.datatype.type)
     self.assertEqual(3, len(data_types), "3 datatypes should have been imported from group.")
Example #22
    def create_adapter(algo_group=None, test_project=None):
        """
        :return: Adapter Class after initialization.
        """
        if algo_group is None:
            algo_group = dao.find_group(
                'tvb_test.adapters.ndimensionarrayadapter',
                'NDimensionArrayAdapter')

        if test_project is None:
            test_user = TestFactory.create_user()
            test_project = TestFactory.create_project(test_user)

        group, _ = FlowService().prepare_adapter(test_project.id, algo_group)
        return FlowService().build_adapter_instance(group)
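
Usage sketch; with no arguments the factory falls back to the NDimensionArrayAdapter test
group and a freshly created user and project:

        # default: new user + project, NDimensionArrayAdapter group
        adapter = TestFactory.create_adapter()
        # or reuse an existing project:
        adapter = TestFactory.create_adapter(test_project=self.test_project)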
Example #23
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.flow_service = FlowService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.default_model = models_module.Generic2dOscillator()

        all_connectivities = self.flow_service.get_available_datatypes(
            self.test_project.id, Connectivity)
        self.connectivity = ABCAdapter.load_entity_by_gid(
            all_connectivities[0][2])
        self.connectivity.number_of_regions = 74
        self.context_model_param = ContextModelParameters(
            self.connectivity, self.default_model)
Example #24
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       name="GeneratedProject",
                                                       description="test_desc")
        self.operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(
            test_project=self.test_project)
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.zip_path = None
Example #25
    def setUp(self):
        """
        Prepare the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(
            test_project=self.test_project)

        result = self.get_all_datatypes()
        self.assertEqual(len(result), 0, "There should be no data type in DB")
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
Example #26
    def _get_entity(self, expected_data, filters=None):
        data_types = FlowService().get_available_datatypes(
            self.test_project.id,
            expected_data.module + "." + expected_data.type, filters)
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type: " + str(expected_data.type))

        entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(entity is not None, "Instance should not be None")

        return entity
Example #27
 def import_sensors(user, project, zip_path, sensors_type):
     ### Retrieve Adapter instance
     group = dao.find_group('tvb.adapters.uploaders.sensors_importer',
                            'Sensors_Importer')
     importer = ABCAdapter.build_adapter(group)
     importer.meta_data = {
         DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
         DataTypeMetaData.KEY_STATE: "RAW"
     }
     args = {'sensors_file': zip_path, 'sensors_type': sensors_type}
     ### Launch Operation
     FlowService().fire_operation(importer, user, project.id, **args)
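
A hypothetical call to the helper above; the archive name and the sensors-type string are
illustrative assumptions:

     # assumed sample archive with sensor positions
     zip_path = os.path.join(os.path.dirname(dataset.__file__), 'eeg_sensors.zip')
     TestFactory.import_sensors(test_user, test_project, zip_path, 'EEG Sensors')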
Example #28
 def setUp(self):
     """
     Reset the database before each test.
     """
     initialize_storage()
     user = model.User("test_user", "test_pass", "*****@*****.**", True, "user")
     self.test_user = dao.store_entity(user) 
     data = dict(name='test_proj', description='desc', users=[])
     self.test_project = ProjectService().store_project(self.test_user, True, None, **data)
     meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
             DataTypeMetaData.KEY_STATE: "RAW"}
     algo_group = dao.find_group(SIMULATOR_MODULE, SIMULATOR_CLASS)
     self.simulator_adapter = FlowService().build_adapter_instance(algo_group)
                                         
     self.operation = model.Operation(self.test_user.id, self.test_project.id, algo_group.id, 
                                      json.dumps(SIMULATOR_PARAMETERS), 
                                      meta=json.dumps(meta), status=model.STATUS_STARTED,
                                      method_name=ABCAdapter.LAUNCH_METHOD)
     self.operation = dao.store_entity(self.operation)
     
     SIMULATOR_PARAMETERS['connectivity'] = self._create_connectivity(self.CONNECTIVITY_NODES)
Example #29
    def test_datatypes_groups(self):
        """
        Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group('tvb_test.adapters.testadapter3', 'TestAdapter3')
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {'first_range': 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
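
A minimal sketch of the range syntax used above; the values are hypothetical. 'first_range'
names the parameter being swept, that parameter maps to its list of values, and each value
becomes one operation in the resulting group:

        data = {'first_range': 'param_5', 'param_5': [1, 2, 3]}  # -> a group of 3 operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)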
Example #30
 def test_full_import(self):
     """
     Test that importing a CFF generates at least one DataType in DB.
     """
     all_dt = self.get_all_datatypes()
     self.assertEqual(0, len(all_dt))
     TestFactory.import_cff(cff_path=self.VALID_CFF,
                            test_user=self.test_user,
                            test_project=self.test_project)
     flow_service = FlowService()
     ### Check that one Connectivity was persisted
     gid_list = flow_service.get_available_datatypes(
         self.test_project.id, 'tvb.datatypes.connectivity.Connectivity')
     self.assertEquals(len(gid_list), 1)
     ### Check that one RegionMapping was persisted
     gid_list = flow_service.get_available_datatypes(
         self.test_project.id, 'tvb.datatypes.surfaces.RegionMapping')
     self.assertEquals(len(gid_list), 1)
     ### Check that one LocalConnectivity was persisted
     gids = flow_service.get_available_datatypes(
         self.test_project.id, 'tvb.datatypes.surfaces.LocalConnectivity')
     self.assertEquals(len(gids), 1)
     connectivity = dao.get_datatype_by_gid(gids[0][2])
     metadata = connectivity.get_metadata()
     self.assertEqual(metadata['Cutoff'], '40.0')
     self.assertEqual(metadata['Equation'], 'null')
     self.assertFalse(metadata['Invalid'])
     self.assertFalse(metadata['Is_nan'])
     self.assertEqual(metadata['Type'], 'LocalConnectivity')
     ### Check that 2 Surfaces were persisted
     gid_list = flow_service.get_available_datatypes(
         self.test_project.id, 'tvb.datatypes.surfaces_data.SurfaceData')
     self.assertEquals(len(gid_list), 2)