Example 1
    def _run_cff_importer(self, cff_path):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter("tvb.adapters.uploaders.cff_importer", "CFF_Importer")
        args = {"cff": cff_path, DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
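
Nearly every snippet in these examples starts from TestFactory.create_adapter(module, classname). A rough, illustrative sketch of the dynamic-import idea behind such a factory; the real TVB TestFactory also resolves the algorithm stored in the database (note the stored_adapter attribute used in later examples), so this is not its actual implementation:

    import importlib

    def create_adapter(module_name, class_name):
        # Import the adapter's module by its dotted path and instantiate the class.
        adapter_module = importlib.import_module(module_name)
        adapter_class = getattr(adapter_module, class_name)
        return adapter_class()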
    def test_delete_dt_free_HDD_space(self):
        """
        Launch an operation that fills the available disk space, then free the space by deleting the resulting datatype and relaunch successfully.
        """
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        self._assert_stored_dt2()
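
The HDD tests above call two helpers, _assert_no_dt2 and _assert_stored_dt2, that are not shown in these examples. A minimal sketch of what they plausibly look like, assuming a Datatype2 test type analogous to the Datatype1 used in test_initiate_operation further down (the real helpers may differ):

    def _assert_no_dt2(self):
        # Hypothetical helper: no Datatype2 should be stored for the test project.
        _, count = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(0, count, "There should be no Datatype2 in the project")

    def _assert_stored_dt2(self, expected_cnt=1):
        # Hypothetical helper: exactly expected_cnt Datatype2 entries should be
        # stored; load and return the first one.
        dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(expected_cnt, count)
        return dao.get_datatype_by_id(dts[0][0])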
    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings
        :param import_file_path: absolute path of the file to be imported
        """

        # Retrieve Adapter instance
        test_subject = "test"
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.region_mapping_importer',
            'RegionMapping_Importer')
        args = {
            'mapping_file': import_file_path,
            'surface': surface_gid,
            'connectivity': connectivity_gid,
            DataTypeMetaData.KEY_SUBJECT: test_subject
        }

        # Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        # During setup we import a CFF which creates an additional RegionMapping
        # So, here we have to find our mapping (just imported)
        data_filter = FilterChain(fields=[FilterChain.datatype + ".subject"],
                                  operations=["=="],
                                  values=[test_subject])
        region_mapping = self._get_entity(RegionMapping, data_filter)

        return region_mapping
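
A hypothetical call to the region mapping helper above; the file name is illustrative, and data_dir, surface and connectivity are assumed to be set up elsewhere in the test:

    region_mapping = self._import(path.join(data_dir, 'region_mapping.txt'),
                                  surface.gid, connectivity.gid)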
 def test_adapter_memory(self):
     """
     Test that the adapter correctly reports its required memory size.
     """
     adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
     self.assertEqual(42, adapter.get_required_memory_size())
Example 5
    def _import_csv_test_connectivity(self, reference_connectivity_gid,
                                      subject):

        ### First prepare input data:
        data_dir = path.abspath(path.dirname(tvb_data.__file__))

        torronto_dir = path.join(data_dir, 'dti_pipeline', 'Output_Toronto')
        weights = path.join(torronto_dir,
                            'output_ConnectionCapacityMatrix.csv')
        tracts = path.join(torronto_dir, 'output_ConnectionDistanceMatrix.csv')
        weights_tmp = weights + '.tmp'
        tracts_tmp = tracts + '.tmp'
        self.helper.copy_file(weights, weights_tmp)
        self.helper.copy_file(tracts, tracts_tmp)

        ### Find importer and Launch Operation
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.csv_connectivity_importer',
            'CSVConnectivityImporter')
        FlowService().fire_operation(importer,
                                     self.test_user,
                                     self.test_project.id,
                                     weights=weights_tmp,
                                     tracts=tracts_tmp,
                                     Data_Subject=subject,
                                     input_data=reference_connectivity_gid)
    def _importSurface(self, import_file_path=None):
        """
        This method is used for importing data in GIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.gifti_surface_importer',
            'GIFTISurfaceImporter')

        args = {
            'data_file': import_file_path,
            DataTypeMetaData.KEY_SUBJECT: ""
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        surface = CorticalSurface()
        data_types = FlowService().get_available_datatypes(
            self.test_project.id, surface.module + "." + surface.type)[0]
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")

        return surface
    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations; the first has enough disk space to finish, but
        after the maximum disk size is lowered the second must fail.
        """
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        data = {"test": 100}

        TvbProfile.current.MAX_DISK_SPACE = (
            1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)

        datatype = self._assert_stored_dt2()
        # Now lower the maximum disk size below the size of the previously
        # resulting datatype plus the estimated requirement of the next one.
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException,
                          self.operation_service.initiate_operation,
                          self.test_user, self.test_project.id, adapter,
                          tmp_folder, **data)
        self._assert_stored_dt2()
Example 8
    def test_import_bold(self):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.mat_timeseries_importer',
            'MatTimeSeriesImporter')

        args = dict(
            data_file=self.bold_path,
            dataset_name='QL_20120824_DK_BOLD_timecourse',
            structure_path='',
            transpose=False,
            slice=None,
            sampling_rate=1000,
            start_time=0,
            tstype='region',
            tstype_parameters_option_region_connectivity=self.connectivity.gid,
            Data_Subject="QL")

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        tsr = TestFactory.get_entity(self.test_project, TimeSeriesRegion())

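        # TVB time series are stored as 4D arrays: (time, state-variable, space, mode).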
        self.assertEqual((661, 1, 68, 1), tsr.read_data_shape())
    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations with enough available disk space so that both finish.
        """
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter3",
            "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now update the maximum disk size to the size of the previously
        # resulting datatype plus the estimated requirement of the next one.
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(
            adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        self._assert_stored_dt2(2)
 def test_launch_operation_HDD_full_space_started_ops(self):
     """
     Test that an operation is rejected when the disk space reserved by already started operations leaves insufficient room.
     """
     space_taken_by_started = 100
     adapter = TestFactory.create_adapter(
         "tvb.tests.framework.adapters.testadapter3",
         "TestAdapterHDDRequired")
     started_operation = model.Operation(
         self.test_user.id,
         self.test_project.id,
         adapter.stored_adapter.id,
         "",
         status=model.STATUS_STARTED,
         estimated_disk_size=space_taken_by_started)
     dao.store_entity(started_operation)
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(
         adapter.get_required_disk_size(**data) + space_taken_by_started -
         1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.assertRaises(NoMemoryAvailableException,
                       self.operation_service.initiate_operation,
                       self.test_user, self.test_project.id, adapter,
                       tmp_folder, **data)
     self._assert_no_dt2()
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.test_user = TestFactory.create_user("UserPM")
        self.test_project = TestFactory.create_project(self.test_user)

        zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__),
                                'eeg_brainstorm_65.txt')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path,
                                   Sensors_Importer.EEG_SENSORS)

        zip_path = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__),
                                'cortex_16384.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, CORTICAL, True)

        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)
        self.sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        self.assertTrue(self.sensors is not None)

        self.importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.projection_matrix_importer',
            'ProjectionMatrixSurfaceEEGImporter')
    def _import(self,
                import_file_path=None,
                expected_result_class=StructuralMRI,
                connectivity=None):
        """
        This method is used for importing data in NIfTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')
        args = {
            'data_file': import_file_path,
            DataTypeMetaData.KEY_SUBJECT: "bla bla",
            'apply_corrections': True,
            'connectivity': connectivity
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        dts, count = dao.get_values_of_datatype(self.test_project.id,
                                                expected_result_class, None)
        self.assertEqual(1, count,
                         "Project should contain only one data type.")

        result = ABCAdapter.load_entity_by_gid(dts[0][2])
        self.assertTrue(result is not None, "Result should not be none")
        return result
Example 13
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a list of BCT adapters;
        imports a CFF data-set
        """
        self.test_user = TestFactory.create_user("BCT_User")
        self.test_project = TestFactory.create_project(self.test_user,
                                                       "BCT-Project")
        ### Make sure Connectivity is in DB
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = dao.get_generic_entity(Connectivity, 'John Doe',
                                                   'subject')[0]

        # make the weights matrix symmetric, or else some BCT algorithms never terminate:
        w = self.connectivity.weights
        self.connectivity.weights = w + w.T - numpy.diag(w.diagonal())

        self.algo_groups = dao.get_generic_entity(model.AlgorithmGroup,
                                                  'MatlabAdapter', 'classname')

        self.assertTrue(self.algo_groups is not None)
        self.assertEquals(6, len(self.algo_groups))
        self.bct_adapters = []
        for group in self.algo_groups:
            self.bct_adapters.append(
                TestFactory.create_adapter(group, self.test_project))
    def test_datatypes_groups(self):
        """
        Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        adapter_instance = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     adapter = TestFactory.create_adapter(module, class_name)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     res = self.operation_service.initiate_operation(
         self.test_user, self.test_project.id, adapter, tmp_folder, **data)
     self.assertTrue(
         res.index("has finished.") > 10, "Operation didn't finish")
     group = dao.get_algorithm_by_module(module, class_name)
     self.assertEqual(group.module,
                      'tvb.tests.framework.adapters.testadapter1',
                      "Wrong data stored.")
     self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
     dts, count = dao.get_values_of_datatype(self.test_project.id,
                                             Datatype1)
     self.assertEqual(count, 1)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example 16
 def test_build_adapter_instance(self):
     """
     Test standard flow for building an adapter instance.
     """
     adapter = TestFactory.create_adapter(TEST_ADAPTER_VALID_MODULE,
                                          TEST_ADAPTER_VALID_CLASS)
     self.assertTrue(isinstance(adapter, ABCSynchronous),
                     "Something went wrong with valid data!")
Example 17
 def _launch_test_algo_on_cluster(self, **data):
     adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     return operations
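
A hypothetical use of this helper, passing the same parameters the test adapter takes in test_initiate_operation; without a ranged parameter, prepare_operations yields a single operation:

    operations = self._launch_test_algo_on_cluster(test1_val1=5, test1_val2=5)
    self.assertEqual(1, len(operations))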
Example 18
 def test_fire_operation(self):
     """
     Test preparation of an adapter and launch mechanism.
     """
     adapter = TestFactory.create_adapter(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
     data = {"test": 5}
     result = self.flow_service.fire_operation(adapter, self.test_user, self.test_project.id, **data)
     self.assertTrue(result.endswith("has finished."), "Operation failed")
Example 20
    def _import_connectivity(self):
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.zip_connectivity_importer',
                                              'ZIPConnectivityImporter')

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id,
                                     uploaded=self.connectivity_path, Data_Subject='QL')

        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
Example 21
 def test_fire_operation(self):
     """
     Test preparation of an adapter and launch mechanism.
     """
     adapter = TestFactory.create_adapter(TEST_ADAPTER_VALID_MODULE,
                                          TEST_ADAPTER_VALID_CLASS)
     data = {"test": 5}
     result = self.flow_service.fire_operation(adapter, self.test_user,
                                               self.test_project.id, **data)
     self.assertTrue(result.endswith("has finished."), "Operation failed")
    def import_test_connectivity96(test_user, test_project, subject=DataTypeMetaData.DEFAULT_SUBJECT):
        """
        Import a connectivity with 96 regions from tvb_data.
        """
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.zip_connectivity_importer',
                                              'ZIPConnectivityImporter')

        data_dir = path.abspath(path.dirname(tvb_data.__file__))
        zip_path = path.join(data_dir, 'connectivity', 'connectivity_96.zip')
        ### Launch Operation
        FlowService().fire_operation(importer, test_user, test_project.id, uploaded=zip_path, Data_Subject=subject)
 def test_launch_operation_HDD_full_space(self):
     """
     Test that an operation is rejected when the available disk space is smaller than the adapter requires.
     """
     adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                       self.test_project.id, adapter, tmp_folder, **data)
     self._assert_no_dt2()
    def _import(self, import_file_path=None):
        """
        This method is used for importing data in TVB format
        :param import_file_path: absolute path of the file to be imported
        """
        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.tvb_importer', 'TVBImporter')
        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
Example 25
    def _run_cff_importer(self, cff_path):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.cff_importer', 'CFF_Importer')
        args = {
            'cff': cff_path,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT
        }

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)
Example 26
    def _import(self, import_file_name):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.connectivity_measure_importer',
                                              'ConnectivityMeasureImporter')
        path = os.path.join(os.path.dirname(test_data.__file__), import_file_name)

        args = {'data_file': path,
                'connectivity': self.connectivity.gid,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
    def test_adapter_huge_memory_requirement(self):
        """
        Test that a NoMemoryAvailableException is raised in case the adapter cannot launch due to insufficient memory.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3",
                                             "TestAdapterHugeMemoryRequired")
        data = {"test": 5}

        operation = model.Operation(self.test_user.id, self.test_project.id, adapter.stored_adapter.id,
                                    json.dumps(data), json.dumps({}), status=model.STATUS_STARTED)
        operation = dao.store_entity(operation)
        self.assertRaises(NoMemoryAvailableException, OperationService().initiate_prelaunch, operation, adapter, {})
Example 28
    def _import(self, import_file_path=None):
        """
        This method is used for importing data in TVB format
        :param import_file_path: absolute path of the file to be imported
        """
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.tvb_importer', 'TVBImporter')
        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)
 def test_stop_operation(self):
     """
     Test that an operation is successfully stopped.
     """
     adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
     data = {"test": 5}
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     self.assertEqual(operation.status, model.STATUS_CANCELED, "Operation should have been canceled!")
 def setUp(self):
     """
     Reset the database before each test.
     """
     self.import_service = ImportService()
     self.flow_service = FlowService()
     self.project_service = ProjectService()
     
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user, name="GeneratedProject", description="test_desc")
     self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
     self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
     TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
     self.zip_path = None 
 def test_launch_operation_HDD_with_space_started_ops(self):
     """
     Test that an operation succeeds when the available disk space also covers the space reserved by already started operations.
     """
     space_taken_by_started = 100
     adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
     started_operation = model.Operation(self.test_user.id, self.test_project.id, adapter.stored_adapter.id, "",
                                         status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started)
     dao.store_entity(started_operation)
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started)
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
     self._assert_stored_dt2()
Example 32
    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.zip_surface_importer', 'ZIPSurfaceImporter')
        args = {'uploaded': import_file_path, 'surface_type': OUTER_SKULL,
                'zero_based_triangles': True,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id, SkullSkin)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface
    def test_import_bold(self):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.mat_timeseries_importer', 'MatTimeSeriesImporter')

        args = dict(data_file=self.bold_path, dataset_name='QL_20120824_DK_BOLD_timecourse', structure_path='',
                    transpose=False, slice=None, sampling_rate=1000, start_time=0,
                    tstype='region',
                    tstype_parameters_option_region_connectivity=self.connectivity.gid,
                    Data_Subject="QL")

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        tsr = TestFactory.get_entity(self.test_project, TimeSeriesRegion())

        self.assertEqual((661, 1, 68, 1), tsr.read_data_shape())
 def test_stop_operation(self):
     """
     Test that an operation is successfully stopped.
     """
     adapter = TestFactory.create_adapter(
         "tvb.tests.framework.adapters.testadapter2", "TestAdapter2")
     data = {"test": 5}
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(
         self.test_user.id, self.test_project.id, algo, algo_category, {},
         **data)
     self.operation_service._send_to_cluster(operations, adapter)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     self.assertEqual(operation.status, model.STATUS_CANCELED,
                      "Operation should have been canceled!")
Example 36
    def _importSurface(self, import_file_path=None):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')

        args = {'data_file': import_file_path,
                "surface_type": FACE,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id, FaceSurface)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")
        return surface
 def test_launch_operation_HDD_full_space(self):
     """
     Test that an operation is rejected when the available disk space is smaller than the adapter requires.
     """
     adapter = TestFactory.create_adapter(
         "tvb.tests.framework.adapters.testadapter3",
         "TestAdapterHDDRequired")
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(
         adapter.get_required_disk_size(**data) - 1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.assertRaises(NoMemoryAvailableException,
                       self.operation_service.initiate_operation,
                       self.test_user, self.test_project.id, adapter,
                       tmp_folder, **data)
     self._assert_no_dt2()
 def test_stop_operation_finished(self):
     """
     Test that an operation that is already finished is not changed by the stop operation.
     """
     adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
     data = {"test1_val1": 5, 'test1_val2': 5}
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     operation = dao.get_operation_by_id(operations[0].id)
     operation.status = model.STATUS_FINISHED
     dao.store_entity(operation)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     self.assertEqual(operation.status, model.STATUS_FINISHED, "Operation shouldn't have been canceled!")
    def test_datatypes_groups(self):
        """
        Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        self.assertEqual(len(all_operations), 0,
                         "There should be no operation")

        adapter_instance = TestFactory.create_adapter(
            'tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user,
                                    self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        self.assertEqual(len(all_operations), 1,
                         "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2,
                         "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEquals(operation_group_id, None,
                             "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(
            operation_group_id=operation_group_id)
        self.assertTrue(
            len(resulted_datatypes) >= 2,
            "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(
            operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id,
                         "DataTypeGroup is incorrect")
    def test_delete_dt_free_HDD_space(self):
        """
        Launch an operation that fills the available disk space, then free the space by deleting the resulting datatype and relaunch successfully.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()
    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations with enough available disk space so that both finish.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now update the maximum disk size to the size of the previously
        # resulting datatype plus the estimated requirement of the next one.
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2(2)
Example 43
    def setUp(self):
        """
        Prepare the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

        self.delete_project_folders()
        result = self.count_all_entities(DataType)
        self.assertEqual(0, result, "There should be no data type in DB")
        result = self.count_all_entities(Project)
        self.assertEqual(0, result)

        self.test_project = TestFactory.import_default_project(self.test_user)
        self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
Example 45
    def import_test_connectivity96(test_user,
                                   test_project,
                                   subject=DataTypeMetaData.DEFAULT_SUBJECT):
        """
        Import a connectivity with 96 regions from tvb_data.
        """
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.zip_connectivity_importer',
            'ZIPConnectivityImporter')

        data_dir = path.abspath(path.dirname(tvb_data.__file__))
        zip_path = path.join(data_dir, 'connectivity', 'connectivity_96.zip')
        ### Launch Operation
        FlowService().fire_operation(importer,
                                     test_user,
                                     test_project.id,
                                     uploaded=zip_path,
                                     Data_Subject=subject)
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       name="GeneratedProject",
                                                       description="test_desc")
        self.operation = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter()
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.zip_path = None
Example 47
    def _import_csv_test_connectivity(self, reference_connectivity_gid, subject):

        ### First prepare input data:
        data_dir = path.abspath(path.dirname(tvb_data.__file__))

        torronto_dir = path.join(data_dir, 'dti_pipeline', 'Output_Toronto')
        weights = path.join(torronto_dir, 'output_ConnectionCapacityMatrix.csv')
        tracts = path.join(torronto_dir, 'output_ConnectionDistanceMatrix.csv')
        weights_tmp = weights + '.tmp'
        tracts_tmp = tracts + '.tmp'
        self.helper.copy_file(weights, weights_tmp)
        self.helper.copy_file(tracts, tracts_tmp)

        ### Find importer and Launch Operation
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.csv_connectivity_importer',
                                              'CSVConnectivityImporter')
        FlowService().fire_operation(importer, self.test_user, self.test_project.id,
                                     weights=weights_tmp, tracts=tracts_tmp, Data_Subject=subject,
                                     input_data=reference_connectivity_gid)
    def test_import(self):

        count_before = self.count_all_entities(Connectivity)
        self.assertEqual(0, count_before)

        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.networkx_importer',
                                              'NetworkxConnectivityImporter')
        args = {'data_file': self.upload_file,
                DataTypeMetaData.KEY_SUBJECT: "John"}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        count_after = self.count_all_entities(Connectivity)
        self.assertEqual(1, count_after)

        conn = self.get_all_entities(Connectivity)[0]
        self.assertEqual(83, conn.number_of_regions)
    def _import(self, import_file_path=None, expected_result_class=StructuralMRI, connectivity=None):
        """
        This method is used for importing data in NIfTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')
        args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: "bla bla",
                'apply_corrections': True, 'connectivity': connectivity}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        dts, count = dao.get_values_of_datatype(self.test_project.id, expected_result_class, None)
        self.assertEqual(1, count, "Project should contain only one data type.")

        result = ABCAdapter.load_entity_by_gid(dts[0][2])
        self.assertTrue(result is not None, "Result should not be none")
        return result
 def test_stop_operation_finished(self):
     """
     Test that an operation that is already finished is not changed by the stop operation.
     """
     adapter = TestFactory.create_adapter(
         "tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
     data = {"test1_val1": 5, 'test1_val2': 5}
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(
         self.test_user.id, self.test_project.id, algo, algo_category, {},
         **data)
     self.operation_service._send_to_cluster(operations, adapter)
     operation = dao.get_operation_by_id(operations[0].id)
     operation.status = model.STATUS_FINISHED
     dao.store_entity(operation)
     self.operation_service.stop_operation(operations[0].id)
     operation = dao.get_operation_by_id(operations[0].id)
     self.assertEqual(operation.status, model.STATUS_FINISHED,
                      "Operation shouldn't have been canceled!")
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.test_user = TestFactory.create_user("UserPM")
        self.test_project = TestFactory.create_project(self.test_user)

        zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'eeg_brainstorm_65.txt')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, Sensors_Importer.EEG_SENSORS)

        zip_path = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, CORTICAL, True)

        self.surface = TestFactory.get_entity(self.test_project, CorticalSurface())
        self.assertTrue(self.surface is not None)
        self.sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        self.assertTrue(self.sensors is not None)

        self.importer = TestFactory.create_adapter('tvb.adapters.uploaders.projection_matrix_importer',
                                                   'ProjectionMatrixSurfaceEEGImporter')
    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations; the first has enough disk space to finish, but
        after the maximum disk size is lowered the second must fail.
        """
        adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
        data = {"test": 100}

        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)

        datatype = self._assert_stored_dt2()
        # Now lower the maximum disk size below the size of the previously
        # resulting datatype plus the estimated requirement of the next one.
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter, tmp_folder, **data)
        self._assert_stored_dt2()
    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           expected_data.module + "." + expected_data.type)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type = Sensors.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None, "Sensors instance should not be none")

        return time_series
Example 54
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a list of BCT adapters;
        imports a CFF data-set
        """
        self.test_user = TestFactory.create_user("BCT_User")
        self.test_project = TestFactory.create_project(self.test_user, "BCT-Project")
        ### Make sure Connectivity is in DB
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = dao.get_generic_entity(Connectivity, 'John Doe', 'subject')[0]

        # make the weights matrix symmetric, or else some BCT algorithms never terminate:
        w = self.connectivity.weights
        self.connectivity.weights = w + w.T - numpy.diag(w.diagonal())

        self.algo_groups = dao.get_generic_entity(model.AlgorithmGroup, 'MatlabAdapter', 'classname')

        self.assertTrue(self.algo_groups is not None)
        self.assertEquals(6, len(self.algo_groups))
        self.bct_adapters = []
        for group in self.algo_groups:
            self.bct_adapters.append(TestFactory.create_adapter(group, self.test_project))
 def _import(self, import_file_path, surface_gid, connectivity_gid):
     """
     This method is used for importing region mappings
     :param import_file_path: absolute path of the file to be imported
     """
         
     # Retrieve Adapter instance
     test_subject = "test"
     importer = TestFactory.create_adapter('tvb.adapters.uploaders.region_mapping_importer',
                                           'RegionMapping_Importer')
     args = {'mapping_file': import_file_path, 'surface': surface_gid,
             'connectivity': connectivity_gid,
             DataTypeMetaData.KEY_SUBJECT: test_subject}
     
     # Launch import Operation
     FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
          
     # During setup we import a CFF which creates an additional RegionMapping
     # So, here we have to find our mapping (just imported)   
     data_filter = FilterChain(fields=[FilterChain.datatype + ".subject"], operations=["=="], values=[test_subject])
     region_mapping = self._get_entity(RegionMapping, data_filter)
     
     return region_mapping
    def _importSurface(self, import_file_path=None):
        """
        This method is used for importing data in GIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.gifti_surface_importer', 'GIFTISurfaceImporter')

        args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: ""}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        surface = CorticalSurface()
        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           surface.module + "." + surface.type)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")

        return surface