def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     res = self.operation_service.initiate_operation(
         self.test_user, self.test_project.id, adapter, tmp_folder, **data
     )
     self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish")
     group = dao.find_group(module, class_name)
     self.assertEqual(group.module, "tvb.tests.framework.adapters.testadapter1", "Wrong data stored.")
     self.assertEqual(group.classname, "TestAdapter1", "Wrong data stored.")
     dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
     self.assertEqual(count, 1)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example #2
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb_test.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     res = self.operation_service.initiate_operation(
         self.test_user,
         self.test_project.id,
         adapter,
         tmp_folder,
         method_name=ABCAdapter.LAUNCH_METHOD,
         **data)
     self.assertTrue(
         res.index("has finished.") > 10, "Operation didn't finish")
     group = dao.find_group(module, class_name)
     self.assertEqual(group.module, 'tvb_test.adapters.testadapter1',
                      "Wrong data stored.")
     self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype1)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example #3
 def build_adapter_from_declaration(cls, adapter_declaration):
     """
      Build an adapter from the declaration in the portlets xml.
     """
     adapter_import_path = adapter_declaration[ABCAdapter.KEY_TYPE]
     class_name = adapter_import_path.split(".")[-1]
     module = adapter_import_path.replace("." + class_name, "")
     if "initparam" in adapter_declaration:
         algo_group = dao.find_group(module, class_name, adapter_declaration["initparam"])
     else:
         algo_group = dao.find_group(module, class_name)
     if algo_group is not None:
         return ABCAdapter.build_adapter(algo_group), algo_group
     else:
         return None, None
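A minimal usage sketch for the helper above. The declaration dictionary, its adapter path value, and the owning class name are illustrative assumptions; only the keys mirror what the method actually reads (ABCAdapter.KEY_TYPE plus an optional "initparam" entry).

     # Hypothetical declaration -- the class that owns this classmethod is not shown in the snippet above.
     declaration = {ABCAdapter.KEY_TYPE: "tvb.tests.framework.adapters.testadapter1.TestAdapter1"}
     adapter, algo_group = SomePortletHelper.build_adapter_from_declaration(declaration)
     if adapter is None:
         print("No algorithm group found for the declared adapter")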
Example #4
 def build_adapter_from_declaration(cls, adapter_declaration):
     """
      Build an adapter from the declaration in the portlets xml.
     """
     adapter_import_path = adapter_declaration[ABCAdapter.KEY_TYPE]
     class_name = adapter_import_path.split('.')[-1]
     module = adapter_import_path.replace('.' + class_name, '')
     if 'initparam' in adapter_declaration:
         algo_group = dao.find_group(module, class_name, adapter_declaration['initparam'])
     else:
         algo_group = dao.find_group(module, class_name)
     if algo_group is not None:
         return ABCAdapter.build_adapter(algo_group), algo_group
     else:
         return None, None
    def _import(self, import_file_path, sensors_type, expected_data):
        """
        This method is used for importing sensors
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {DataTypeMetaData.KEY_SUBJECT: "",
                              DataTypeMetaData.KEY_STATE: "RAW"}

        args = {'sensors_file': import_file_path, 'sensors_type': sensors_type}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           expected_data.module + "." + expected_data.type)
        self.assertEqual(1, len(data_types), "Project should contain only one data type = Sensors.")

        sensors = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(sensors is not None, "Sensors instance should not be None")

        return sensors
    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give the user enough disk space so that both finish.
        """
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
        # Now update the maximum disk size to the size of the previously resulting datatype (transformed from kB to MB)
        # plus what the next operation is estimated to require (transformed from B to MB)
        TVBSettings.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 2)
        datatype = dao.get_datatype_by_id(dts[1][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations, giving the user just enough disk space for the first to finish;
        after the quota is updated, the second should fail.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
        # Now update the maximum disk size to be less than the size of the previously resulting datatype (transformed from kB to MB)
        # plus what the next operation is estimated to require (transformed from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                                float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter,
                          tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
    def _create_mapped_arrays(self, project_id):
        """
        :param project_id: the project in which the arrays are created
        :return: a list of dummy `MappedArray`
        """
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 0)
        
        algo_group = dao.find_group('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
        group, _ = self.flow_service.prepare_adapter(project_id, algo_group)

        adapter_instance = self.flow_service.build_adapter_instance(group)
        data = {'param_1': 'some value'}
        #create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 1)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        count = self.flow_service.get_available_datatypes(project_id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 2)
        
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers, count = self.flow_service.get_available_datatypes(project_id,
                                                                          "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(count, 3)

        return array_wrappers
Example #9
    def _import_csv_test_connectivity(self, reference_connectivity_gid,
                                      subject):

        ### First prepare input data:
        data_dir = path.abspath(path.dirname(tvb_data.__file__))

        torronto_dir = path.join(data_dir, 'dti_pipeline', 'Output_Toronto')
        weights = path.join(torronto_dir,
                            'output_ConnectionCapacityMatrix.csv')
        tracts = path.join(torronto_dir, 'output_ConnectionDistanceMatrix.csv')
        weights_tmp = weights + '.tmp'
        tracts_tmp = tracts + '.tmp'
        self.helper.copy_file(weights, weights_tmp)
        self.helper.copy_file(tracts, tracts_tmp)

        ### Find importer and Launch Operation
        group = dao.find_group(
            'tvb.adapters.uploaders.csv_connectivity_importer',
            'CSVConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)
        FlowService().fire_operation(importer,
                                     self.test_user,
                                     self.test_project.id,
                                     weights=weights_tmp,
                                     tracts=tracts_tmp,
                                     Data_Subject=subject,
                                     input_data=reference_connectivity_gid)
    def test_wrong_shape(self):
        """
        Verifies that importing a projection matrix of a different shape raises an exception.
        """
        group = dao.find_group(
            'tvb.adapters.uploaders.projection_matrix_importer',
            'ProjectionMatrixSurfaceEEGImporter')
        importer = ABCAdapter.build_adapter(group)

        file_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)),
            'surface_reg_13_eeg_62.mat')
        args = {
            'projection_file': file_path,
            'dataset_name': 'ProjectionMatrix',
            'sensors': self.sensors.gid,
            'surface': self.surface.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT
        }

        try:
            FlowService().fire_operation(importer, self.test_user,
                                         self.test_project.id, **args)
            self.fail(
                "This was expected not to run! 62 rows in proj matrix, but 65 sensors"
            )
        except OperationException:
            pass
Example #11
    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group(
            'tvb.adapters.uploaders.region_mapping_importer',
            'RegionMapping_Importer')
        importer = ABCAdapter.build_adapter(group)

        args = {
            'mapping_file': import_file_path,
            'surface': surface_gid,
            'connectivity': connectivity_gid,
            DataTypeMetaData.KEY_SUBJECT: "test"
        }

        now = datetime.datetime.now()

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        # During setup we import a CFF which creates an additional RegionMapping
        # So, here we have to find our mapping (just imported)
        data_filter = FilterChain(
            fields=[FilterChain.datatype + ".create_date"],
            operations=[">"],
            values=[now])
        region_mapping = self._get_entity(RegionMapping(), data_filter)

        return region_mapping
Example #12
 def create_operation(algorithm=None, test_user=None, test_project=None, 
                      operation_status=model.STATUS_FINISHED, parameters="test params"):
     """
     Create persisted operation.
     
      :param algorithm: When None, the NDimensionArrayAdapter test algorithm is looked up and used.
     :return: Operation entity after persistence. 
     """
     if algorithm is None:
         algo_group = dao.find_group('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
         algorithm = dao.get_algorithm_by_group(algo_group.id)
         
     if test_user is None:
         test_user = TestFactory.create_user()
         
     if test_project is None:
         test_project = TestFactory.create_project(test_user)
         
     meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
             DataTypeMetaData.KEY_STATE: "RAW_DATA"}
     operation = model.Operation(test_user.id, test_project.id, algorithm.id, parameters, meta=json.dumps(meta),
                                 status=operation_status, method_name=ABCAdapter.LAUNCH_METHOD)
     dao.store_entity(operation)
     ### Make sure lazy attributes are correctly loaded.
     return dao.get_operation_by_id(operation.id)
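A usage sketch for the factory above, assuming it is exposed on TestFactory like the other helpers in this collection; the assertions only restate the function's defaults.

     # Assumption: create_operation is reachable as TestFactory.create_operation.
     operation = TestFactory.create_operation()
     assert operation.status == model.STATUS_FINISHED
     assert operation.parameters == "test params"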
Example #13
    def _import(self,
                import_file_path=None,
                expected_result_class=StructuralMRI,
                connectivity=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.nifti_importer',
                               'NIFTIImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {
            'data_file': import_file_path,
            DataTypeMetaData.KEY_SUBJECT: "bla bla",
            'apply_corrections': True,
            'connectivity': connectivity
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        dts, count = dao.get_values_of_datatype(self.test_project.id,
                                                expected_result_class, None)
        self.assertEqual(1, count,
                         "Project should contain only one data type.")

        result = ABCAdapter.load_entity_by_gid(dts[0][2])
        self.assertTrue(result is not None, "Result should not be none")
        return result
    def test_datatypes_groups(self):
        """
        Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
Example #15
 def test_build_adapter_instance(self):
     """
     Test standard flow for building an adapter instance.
     """
     algo_group = dao.find_group(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
     adapter = ABCAdapter.build_adapter(algo_group)
     self.assertTrue(isinstance(adapter, ABCSynchronous), "Something went wrong with valid data!")
 def test_launch_operation_HDD_full_space_started_ops(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     space_taken_by_started = 100
     module = "tvb.tests.framework.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     started_operation = model.Operation(
         self.test_user.id,
         self.test_project.id,
         group.id,
         "",
         status=model.STATUS_STARTED,
         estimated_disk_size=space_taken_by_started,
     )
     dao.store_entity(started_operation)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started - 1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     self.assertRaises(
         NoMemoryAvailableException,
         self.operation_service.initiate_operation,
         self.test_user,
         self.test_project.id,
         adapter,
         tmp_folder,
         **data
     )
     self._assert_no_dt2()
    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations, giving the user just enough disk space for the first to finish;
        after the quota is updated, the second should fail.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)

        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 1 + float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter, tmp_folder, **data)

        datatype = self._assert_stored_dt2()
        # Now update the maximum disk size to be less than the size of the previously resulting datatype (transformed from kB to MB)
        # plus what the next operation is estimated to require (transformed from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + float(
            adapter.get_required_disk_size(**data) - 1
        )

        self.assertRaises(
            NoMemoryAvailableException,
            self.operation_service.initiate_operation,
            self.test_user,
            self.test_project.id,
            adapter,
            tmp_folder,
            **data
        )
        self._assert_stored_dt2()
Example #18
 def import_sensors(user, project, zip_path, sensors_type):
     ### Retrieve Adapter instance 
     group = dao.find_group('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')
     importer = ABCAdapter.build_adapter(group)
     args = {'sensors_file': zip_path, 'sensors_type': sensors_type}
     ### Launch Operation
     FlowService().fire_operation(importer, user, project.id, **args)
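A usage sketch for the importer helper above. The zip path and sensors-type string are placeholders, and reaching the helper through TestFactory is an assumption.

     test_user = TestFactory.create_user()
     test_project = TestFactory.create_project(test_user)
     # Placeholder archive path and type string -- substitute values accepted by Sensors_Importer.
     TestFactory.import_sensors(test_user, test_project, "/tmp/eeg_sensors.zip", "EEG Sensors")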
Example #19
 def create_group(test_user=None, test_project=None, subject="John Doe"):
     """
     Create a group of 2 operations, each with at least one resultant DataType.
     """
     if test_user is None:
         test_user = TestFactory.create_user()  
     if test_project is None:
         test_project = TestFactory.create_project(test_user)
        
     ### Retrieve Adapter instance 
     algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id) 
     
     adapter_inst = TestFactory.create_adapter(algo_group=algo_group, test_project=test_project)
     adapter_inst.meta_data = {DataTypeMetaData.KEY_SUBJECT: subject}
     args = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
     
     ### Prepare Operations group. Execute them synchronously
     service = OperationService()
     operations = service.prepare_operations(test_user.id, test_project.id, algo, algo_category, {}, **args)[0]
     service.launch_operation(operations[0].id, False, adapter_inst)
     service.launch_operation(operations[1].id, False, adapter_inst)
     
     resulted_dts = dao.get_datatype_in_group(operation_group_id=operations[0].fk_operation_group)
     return resulted_dts, operations[0].fk_operation_group
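A sketch of how the returned tuple might be consumed; both values come directly from the return statement above, and reaching the helper through TestFactory is an assumption.

     datatypes, op_group_id = TestFactory.create_group(subject="John Doe")
     assert len(datatypes) >= 2
     datatype_group = dao.get_datatypegroup_by_op_group_id(op_group_id)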
Example #21
    def test_happy_flow_region_import(self):
        """
        Verifies the happy flow for importing a region.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project,
                                                       ProjectionRegionEEG())
        group = dao.find_group(
            'tvb.adapters.uploaders.projection_matrix_importer',
            'ProjectionMatrixRegionEEGImporter')
        importer = ABCAdapter.build_adapter(group)

        zip_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)),
            'region_conn_74_eeg_1020_62.mat')
        args = {
            'projection_file': zip_path,
            'dataset_name': 'ProjectionMatrix',
            'connectivity': self.connectivity.gid,
            'sensors': self.sensors.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT
        }

        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)
        dt_count_after = TestFactory.get_entity_count(self.test_project,
                                                      ProjectionRegionEEG())

        self.assertEqual(dt_count_before + 1, dt_count_after)
Example #22
 def test_launch_operation_HDD_full_space_started_ops(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     space_taken_by_started = 100
     module = "tvb_test.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     started_operation = model.Operation(
         self.test_user.id,
         self.test_project.id,
         group.id,
         "",
         status=model.STATUS_STARTED,
         result_disk_size=space_taken_by_started)
     dao.store_entity(started_operation)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 100}
     TVBSettings.MAX_DISK_SPACE = float(
         adapter.get_required_disk_size(**data) + space_taken_by_started -
         1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.assertRaises(NoMemoryAvailableException,
                       self.operation_service.initiate_operation,
                       self.test_user,
                       self.test_project.id,
                       adapter,
                       tmp_folder,
                       method_name=ABCAdapter.LAUNCH_METHOD,
                       **data)
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
     self.assertEqual(len(dts), 0)
Example #23
 def create_operation(algorithm=None, test_user=None, test_project=None, 
                      operation_status=model.STATUS_FINISHED, parameters="test params"):
     """
     Create persisted operation.
     
      :param algorithm: When None, the NDimensionArrayAdapter test algorithm is looked up and used.
     :return: Operation entity after persistence. 
     """
     if algorithm is None:
         algo_group = dao.find_group('tvb.tests.framework.adapters.ndimensionarrayadapter', 'NDimensionArrayAdapter')
         algorithm = dao.get_algorithm_by_group(algo_group.id)
         
     if test_user is None:
         test_user = TestFactory.create_user()
         
     if test_project is None:
         test_project = TestFactory.create_project(test_user)
         
     meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
             DataTypeMetaData.KEY_STATE: "RAW_DATA"}
     operation = model.Operation(test_user.id, test_project.id, algorithm.id, parameters, meta=json.dumps(meta),
                                 status=operation_status)
     dao.store_entity(operation)
     ### Make sure lazy attributes are correctly loaded.
     return dao.get_operation_by_id(operation.id)
Example #24
 def test_launch_operation_HDD_with_space(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb_test.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test": 100}
     TVBSettings.MAX_DISK_SPACE = float(
         adapter.get_required_disk_size(**data))
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.operation_service.initiate_operation(
         self.test_user,
         self.test_project.id,
         adapter,
         tmp_folder,
         method_name=ABCAdapter.LAUNCH_METHOD,
         **data)
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Example #25
 def test_launch_operation_HDD_full_space_started_ops(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     space_taken_by_started = 100
     module = "tvb.tests.framework.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     started_operation = model.Operation(
         self.test_user.id,
         self.test_project.id,
         group.id,
         "",
         status=model.STATUS_STARTED,
         estimated_disk_size=space_taken_by_started)
     dao.store_entity(started_operation)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(
         adapter.get_required_disk_size(**data) + space_taken_by_started -
         1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.assertRaises(NoMemoryAvailableException,
                       self.operation_service.initiate_operation,
                       self.test_user, self.test_project.id, adapter,
                       tmp_folder, **data)
     self._assert_no_dt2()
Example #26
    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations, giving the user just enough disk space for the first to finish;
        after the quota is updated, the second should fail.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)

        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = (
            1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)

        datatype = self._assert_stored_dt2()
        # Now update the maximum disk size to be less than the size of the previously resulting datatype (transformed from kB to MB)
        # plus what the next operation is estimated to require (transformed from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                            float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException,
                          self.operation_service.initiate_operation,
                          self.test_user, self.test_project.id, adapter,
                          tmp_folder, **data)
        self._assert_stored_dt2()
Example #27
    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give the user enough disk space so that both finish.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = 2 * float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now update the maximum disk size to the size of the previously resulting datatype (transformed from kB to MB)
        # plus what the next operation is estimated to require (transformed from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size) + float(
            adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        self._assert_stored_dt2(2)
Example #28
    def parse_event_node(self):
        """
        Parse the stored event node to get required data and arguments.
        """
        kw_parameters = {}
        for one_arg in self.event_node.childNodes:
            if one_arg.nodeType != Node.ELEMENT_NODE:
                continue
            if one_arg.nodeName == ELEM_ADAPTER:
                # TODO: so far there is no need for it, but we may need to handle
                # cases where the same module/class is used with a different init parameter
                group = dao.find_group(one_arg.getAttribute(ATT_MODULE), one_arg.getAttribute(ATT_CLASS))
                adapter = ABCAdapter.build_adapter(group)
                result_uid = one_arg.getAttribute(ATT_UID)
                if result_uid:
                    kw_parameters[ATT_UID] = result_uid
                LOGGER.debug("Adapter used is %s", str(adapter.__class__))
                self.callable_object = adapter
                continue
            if one_arg.nodeName == ELEM_METHOD:
                self.call_method = one_arg.getAttribute(ATT_NAME)
                if one_arg.getAttribute(ATT_OPERATION_HIDDEN):
                    self.operation_visible = False
                continue
            if one_arg.nodeName == ELEM_ARGS:
                kw_parameters.update(_parse_arguments(one_arg))
                continue
            LOGGER.info("Ignored undefined node %s", str(one_arg.nodeName))

        self.arguments.update(kw_parameters)
    def test_happy_flow_import(self):
        """
        Test that importing a CFF generates at least one DataType in DB.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project,
                                                       Connectivity())
        group = dao.find_group(
            'tvb.adapters.uploaders.zip_connectivity_importer',
            'ZIPConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
            DataTypeMetaData.KEY_STATE: "RAW"
        }
        zip_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)),
            'connectivity_regions_96.zip')
        args = {'uploaded': zip_path}

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)
        dt_count_after = TestFactory.get_entity_count(self.test_project,
                                                      Connectivity())
        self.assertTrue(dt_count_after == dt_count_before + 1)
    def test_happy_flow_surface_import(self):
        """
        Verifies the happy flow for importing a surface.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project, ProjectionSurfaceEEG())
        group = dao.find_group(
            "tvb.adapters.uploaders.projection_matrix_importer", "ProjectionMatrixSurfaceEEGImporter"
        )
        importer = ABCAdapter.build_adapter(group)

        file_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)), "projection_eeg_65_surface_16k.npy"
        )
        args = {
            "projection_file": file_path,
            "dataset_name": "ProjectionMatrix",
            "sensors": self.sensors.gid,
            "surface": self.surface.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
        }

        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        dt_count_after = TestFactory.get_entity_count(self.test_project, ProjectionSurfaceEEG())

        self.assertEqual(dt_count_before + 1, dt_count_after)
Example #31
 def prepare_adapter(self, project_id, algo_group):
     """
      Given an Adapter, specified by module and class name, build an
      instance of it and prepare its input interface.
      Returns a tuple: (algorithm group, dictionary describing the
      adapter interface).
     """
     adapter_module = algo_group.module.replace('-', '.')
     adapter_name = algo_group.classname
     try:
         # Prepare Adapter Interface, by populating with existent data,
         # in case of a parameter of type DataType.
         group = dao.find_group(adapter_module, adapter_name,
                                algo_group.init_parameter)
         adapter_instance = self.build_adapter_instance(group)
         interface = adapter_instance.get_input_tree()
         interface = self.prepare_parameters(interface, project_id,
                                             group.fk_category)
         interface = ABCAdapter.prepare_param_names(interface)
         return group, interface
      except Exception as excep:
         self.logger.exception(excep)
         self.logger.error('Not found:' + adapter_name + ' in:' +
                           adapter_module)
         raise OperationException("Could not prepare " + adapter_name)
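Other examples in this collection call the method through FlowService; a minimal sketch, assuming a persisted project as in the surrounding tests:

     # Assumes a persisted test_project, as created in the surrounding test cases.
     algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
     group, interface = FlowService().prepare_adapter(test_project.id, algo_group)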
    def _import(self, import_file_path, surface_gid, connectivity_gid):
        """
        This method is used for importing region mappings
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group("tvb.adapters.uploaders.region_mapping_importer", "RegionMapping_Importer")
        importer = ABCAdapter.build_adapter(group)

        args = {
            "mapping_file": import_file_path,
            "surface": surface_gid,
            "connectivity": connectivity_gid,
            DataTypeMetaData.KEY_SUBJECT: "test",
        }

        now = datetime.datetime.now()

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        # During setup we import a CFF which creates an additional RegionMapping
        # So, here we have to find our mapping (just imported)
        data_filter = FilterChain(fields=[FilterChain.datatype + ".create_date"], operations=[">"], values=[now])
        region_mapping = self._get_entity(RegionMapping(), data_filter)

        return region_mapping
Example #33
    def _create_mapped_arrays(self, project_id):

        array_wrappers = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(array_wrappers), 0)

        algo_group = dao.find_group('tvb_test.adapters.ndimensionarrayadapter',
                                    'NDimensionArrayAdapter')
        group, _ = self.flow_service.prepare_adapter(project_id, algo_group)

        adapter_instance = self.flow_service.build_adapter_instance(group)
        data = {'param_1': 'some value'}
        #create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user,
                                         project_id, **data)
        array_wrappers = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(array_wrappers), 1)

        self.flow_service.fire_operation(adapter_instance, self.test_user,
                                         project_id, **data)
        array_wrappers = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(array_wrappers), 2)

        self.flow_service.fire_operation(adapter_instance, self.test_user,
                                         project_id, **data)
        array_wrappers = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(array_wrappers), 3)

        return array_wrappers
Example #34
 def test_happy_flow_surface_import(self):
     dt_count_before = TestFactory.get_entity_count(self.test_project,
                                                    ProjectionSurfaceEEG())
     group = dao.find_group(
         'tvb.adapters.uploaders.projection_matrix_importer',
         'ProjectionMatrixSurfaceEEGImporter')
     importer = ABCAdapter.build_adapter(group)
     importer.meta_data = {
         DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
         DataTypeMetaData.KEY_STATE: "RAW"
     }
     zip_path = os.path.join(
         os.path.abspath(os.path.dirname(dataset.__file__)),
         'region_conn_74_eeg_1020_62.mat')
     args = {
         'projection_file': zip_path,
         'dataset_name': 'ProjectionMatrix',
         'connectivity': self.connectivity.gid,
         'sensors': self.sensors.gid,
         'surface': self.surface.gid
     }
     FlowService().fire_operation(importer, self.test_user,
                                  self.test_project.id, **args)
      dt_count_after = TestFactory.get_entity_count(self.test_project,
                                                    ProjectionSurfaceEEG())
     self.assertTrue(dt_count_after == dt_count_before + 1)
Example #36
    def test_datatypes_groups(self):
        """
        Tests that the DataType group is set correctly on the DataTypes resulting from the same operation group.
        """
        flow_service = FlowService()

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")

        algogroup = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.")

        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))

        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
    def test_wrong_shape(self):
        """
        Verifies that importing a projection matrix of a different shape raises an exception.
        """
        group = dao.find_group(
            "tvb.adapters.uploaders.projection_matrix_importer", "ProjectionMatrixSurfaceEEGImporter"
        )
        importer = ABCAdapter.build_adapter(group)

        file_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)), "projection_eeg_62_surface_16k.mat"
        )
        args = {
            "projection_file": file_path,
            "dataset_name": "ProjectionMatrix",
            "sensors": self.sensors.gid,
            "surface": self.surface.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
        }

        try:
            FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
            self.fail("This was expected not to run! 62 rows in proj matrix, but 65 sensors")
        except OperationException:
            pass
Example #38
    def parse_event_node(self):
        """
        Parse the stored event node to get required data and arguments.
        """
        kw_parameters = {}
        for one_arg in self.event_node.childNodes:
            if one_arg.nodeType != Node.ELEMENT_NODE:
                continue
            if one_arg.nodeName == ELEM_ADAPTER:
                # TODO: so far there is no need for it, but we may need to handle
                # cases where the same module/class is used with a different init parameter
                group = dao.find_group(one_arg.getAttribute(ATT_MODULE),
                                       one_arg.getAttribute(ATT_CLASS))
                adapter = ABCAdapter.build_adapter(group)
                result_uid = one_arg.getAttribute(ATT_UID)
                if result_uid:
                    kw_parameters[ATT_UID] = result_uid
                LOGGER.debug("Adapter used is %s", str(adapter.__class__))
                self.callable_object = adapter
                continue
            if one_arg.nodeName == ELEM_METHOD:
                self.call_method = one_arg.getAttribute(ATT_NAME)
                if one_arg.getAttribute(ATT_OPERATION_HIDDEN):
                    self.operation_visible = False
                continue
            if one_arg.nodeName == ELEM_ARGS:
                kw_parameters.update(_parse_arguments(one_arg))
                continue
            LOGGER.info("Ignored undefined node %s", str(one_arg.nodeName))

        self.arguments.update(kw_parameters)
Example #39
    def import_cff(cff_path=None, test_user=None, test_project=None):
        """
        This method is used for importing a CFF data-set (load CFF_Importer, launch it).
        :param cff_path: absolute path where CFF file exists. When None, a default CFF will be used.
        :param test_user: optional persisted User instance, to use as Operation->launcher
        :param test_project: optional persisted Project instance, to use for launching Operation in it. 
        """
        ### Prepare Data
        if cff_path is None:
            cff_path = os.path.join(os.path.dirname(cff_dataset.__file__),
                                    'connectivities.cff')
        if test_user is None:
            test_user = TestFactory.create_user()
        if test_project is None:
            test_project = TestFactory.create_project(test_user)

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.cff_importer',
                               'CFF_Importer')
        importer = ABCAdapter.build_adapter(group)
        args = {
            'cff': cff_path,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT
        }

        ### Launch Operation
        FlowService().fire_operation(importer, test_user, test_project.id,
                                     **args)
Example #40
    def _export_linked_datatypes(self, project, zip_file):
        files_helper = FilesHelper()
        linked_paths = self._get_linked_datatypes_storage_path(project)

        if not linked_paths:
            # do not export an empty operation
            return

        # Make a import operation which will contain links to other projects
        alg_group = dao.find_group(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
        algo = dao.get_algorithm_by_group(alg_group.id)
        op = model.Operation(None, project.id, algo.id, '')
        op.project = project
        op.algorithm = algo
        op.id = 'links-to-external-projects'
        op.start_now()
        op.mark_complete(model.STATUS_FINISHED)

        # write operation.xml to disk
        files_helper.write_operation_metadata(op)
        op_folder = files_helper.get_operation_folder(op.project.name, op.id)
        operation_xml = files_helper.get_operation_meta_file_path(op.project.name, op.id)
        op_folder_name = os.path.basename(op_folder)

        # add operation.xml
        zip_file.write(operation_xml, op_folder_name + '/' + os.path.basename(operation_xml))

        # add linked datatypes to archive in the import operation
        for pth in linked_paths:
            zip_pth = op_folder_name + '/' + os.path.basename(pth)
            zip_file.write(pth, zip_pth)

        # remove these files, since we only want them in export archive
        files_helper.remove_folder(op_folder)
Example #41
    def test_delete_dt_free_HDD_space(self):
        """
        Launch an operation, delete the resulting DataType to free disk space, then successfully relaunch.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = float(
            adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                      "TEMP")

        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        datatype = self._assert_stored_dt2()

        # Now free some space and relaunch
        ProjectService().remove_datatype(self.test_project.id, datatype.gid)
        self._assert_no_dt2()
        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project.id,
                                                  adapter, tmp_folder, **data)
        self._assert_stored_dt2()
Example #42
    def _import(self, import_file_path=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.nifti_importer',
                               'NIFTIImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: "",
            DataTypeMetaData.KEY_STATE: "RAW"
        }

        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        time_series = TimeSeries()
        data_types = FlowService().get_available_datatypes(
            self.test_project.id, time_series.module + "." + time_series.type)
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None,
                        "TimeSeries should not be none")

        return time_series
Example #44
    def _importSurface(self, import_file_path=None):
        """
        This method is used for importing data in GIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.gifti_surface_importer',
                               'GIFTISurfaceImporter')
        importer = ABCAdapter.build_adapter(group)

        args = {
            'data_file': import_file_path,
            DataTypeMetaData.KEY_SUBJECT: ""
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        surface = CorticalSurface()
        data_types = FlowService().get_available_datatypes(
            self.test_project.id, surface.module + "." + surface.type)[0]
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(surface is not None, "Surface should not be None")

        return surface
Exemplo n.º 45
def remove_visualizer_references():
    """
    As we removed an algorithm, remove left-overs.
    """

    LOGGER.info("Starting to remove references to the old viewer ...")

    pearson_group = dao.find_group(
        'tvb.adapters.visualizers.cross_correlation',
        'PearsonCorrelationCoefficientVisualizer')
    pearson_algorithm = dao.get_algorithm_by_group(pearson_group.id)

    pearson_operations = dao.get_generic_entity(model.Operation,
                                                pearson_algorithm.id,
                                                "fk_from_algo")
    for op in pearson_operations:
        dao.remove_entity(model.Operation, op.id)

    pearson_workflows = dao.get_generic_entity(model.WorkflowStepView,
                                               pearson_algorithm.id,
                                               "fk_algorithm")
    for ws in pearson_workflows:
        dao.remove_entity(model.WorkflowStepView, ws.id)

    LOGGER.info("References removed.")
 def test_adapter_memory(self):
     """
     Test that a method not implemented exception is raised in case the
     get_required_memory_size method is not implemented.
     """
     algo_group = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapterHDDRequired")
     adapter = FlowService().build_adapter_instance(algo_group)
     self.assertEqual(42, adapter.get_required_memory_size())
Exemplo n.º 47
 def get_algorithm_by_module_and_class(module, classname):
     """
     Get the db entry from the algorithm table for the given module and 
     class.
     """
     group = dao.find_group(module, classname)
     algo = dao.get_algorithm_by_group(group.id)
     return algo, group
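
A short usage sketch, reusing the test adapter module and class referenced in the other examples (illustrative only):

 # Illustrative usage: look up the stored algorithm entry for the test adapter.
 algo, group = get_algorithm_by_module_and_class("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
 assert algo is not None and group is not None
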
Exemplo n.º 48
    def _run_cff_importer(self, cff_path):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.cff_importer', 'CFF_Importer')
        importer = ABCAdapter.build_adapter(group)
        args = {'cff': cff_path, DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
    def _import_connectivity(self):
        group = dao.find_group('tvb.adapters.uploaders.zip_connectivity_importer', 'ZIPConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id,
                                     uploaded=self.connectivity_path, Data_Subject='QL')

        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
Exemplo n.º 50
    def import_surface_obj(user, project, obj_path, surface_type):
        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {'data_file': obj_path,
                'surface_type': surface_type}

        ### Launch Operation
        FlowService().fire_operation(importer, user, project.id, **args)
Exemplo n.º 51
 def test_fire_operation(self):
     """
     Test preparation of an adapter and launch mechanism.
     """
     algo_group = dao.find_group(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS)
     adapter = self.flow_service.build_adapter_instance(algo_group)
     data = {"test": 5}
     result = self.flow_service.fire_operation(adapter, self.test_user, self.test_project.id, **data)
     self.assertTrue(result.endswith("has finished."), "Operation failed.")
Exemplo n.º 52
 def import_surface_zip(user, project, zip_path, surface_type, zero_based):
     ### Retrieve Adapter instance 
     group = dao.find_group('tvb.adapters.uploaders.zip_surface_importer', 'ZIPSurfaceImporter')
     importer = ABCAdapter.build_adapter(group)
     args = {'uploaded': zip_path, 'surface_type': surface_type,
             'zero_based_triangles': zero_based}
     
     ### Launch Operation
     FlowService().fire_operation(importer, user, project.id, **args)
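
A possible call, illustrative only: the zip path is a placeholder and the surface type string depends on the constants the importer accepts; CorticalSurface is the same datatype used in the GIFTI example above.

 # Illustrative usage only; 'cortex_surface.zip' is a placeholder path.
 import_surface_zip(self.test_user, self.test_project, 'cortex_surface.zip', 'Cortical Surface', zero_based=True)
 surface = TestFactory.get_entity(self.test_project, CorticalSurface())
 self.assertTrue(surface is not None, "Surface should have been imported.")
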
def upgrade(_migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    db_group = dao.find_group('tvb.adapters.uploaders.csv_connectivity_importer', 'ZIPConnectivityImporter')
    if db_group is not None:
        db_group.module = 'tvb.adapters.uploaders.csv_connectivity_importer'
        db_group.classname = 'CSVConnectivityImporter'
        dao.store_entity(db_group)
def downgrade(_migrate_engine):
    """Operations to reverse the above upgrade go here."""
    simulator_db_group = dao.find_group('tvb.adapters.simulator.simulator_adapter', 'SimulatorAdapter')
    if simulator_db_group is not None:
        simulator_db_group.module = 'tvb.adapters.simulator.simulatorAdapter'
        simulator_db_group.classname = 'SimulatorAdapter'
        dao.store_entity(simulator_db_group)
        
        
        
def upgrade(_migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.
    """
    simulator_db_group = dao.find_group('tvb.adapters.simulator.simulatorAdapter', 'SimulatorAdapter')
    if simulator_db_group is not None:
        simulator_db_group.module = 'tvb.adapters.simulator.simulator_adapter'
        simulator_db_group.classname = 'SimulatorAdapter'
        dao.store_entity(simulator_db_group)
def downgrade(_migrate_engine):
    """Operations to reverse the above upgrade go here."""
    db_group = dao.find_group('tvb.adapters.uploaders.csv_connectivity_importer', 'CSVConnectivityImporter')
    if db_group is not None:
        db_group.module = 'tvb.adapters.uploaders.csv_connectivity_importer'
        db_group.classname = 'ZIPConnectivityImporter'
        dao.store_entity(db_group)
        
        
        
    def import_test_connectivity96(test_user, test_project, subject=DataTypeMetaData.DEFAULT_SUBJECT):
        """
        Import a connectivity with 96 regions from tvb_data.
        """
        group = dao.find_group('tvb.adapters.uploaders.zip_connectivity_importer', 'ZIPConnectivityImporter')
        importer = ABCAdapter.build_adapter(group)

        data_dir = path.abspath(path.dirname(tvb_data.__file__))
        zip_path = path.join(data_dir, 'connectivity', 'connectivity_96.zip')
        ### Launch Operation
        FlowService().fire_operation(importer, test_user, test_project.id, uploaded=zip_path, Data_Subject=subject)
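
Once the operation has finished, the imported connectivity can be retrieved the same way as in the ZIP connectivity example above; a minimal, illustrative follow-up (assuming the loaded Connectivity exposes number_of_regions):

    # Illustrative follow-up to the import above.
    import_test_connectivity96(self.test_user, self.test_project)
    connectivity = TestFactory.get_entity(self.test_project, Connectivity())
    self.assertEqual(96, connectivity.number_of_regions, "Wrong connectivity imported.")
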
Exemplo n.º 58
    def _import(self, import_file_path=None):
        """
        This method is used for importing data in TVB format
        :param import_file_path: absolute path of the file to be imported
        """
        ### Retrieve Adapter instance 
        group = dao.find_group('tvb.adapters.uploaders.tvb_importer', 'TVBImporter')
        importer = ABCAdapter.build_adapter(group)
        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
Exemplo n.º 59
 def _launch_test_algo_on_cluster(self, **data):
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     algo_group = adapter.algorithm_group
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id)
     operations, _ = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id, algo,
                                                               algo_category, {}, ABCAdapter.LAUNCH_METHOD, **data)
     self.operation_service._send_to_cluster(operations, adapter)
     return operations
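
A sketch of a test that could drive this helper; the status constants and dao.get_operation_by_id are assumptions about the framework's model and dao modules, so treat the assertions as illustrative:

 def test_launch_on_cluster(self):
     # Illustrative test body: re-read each operation from the DB and check it
     # was at least submitted (assumed constants: STATUS_STARTED / STATUS_FINISHED).
     data = {"test1_val1": 5, "test1_val2": 5}
     operations = self._launch_test_algo_on_cluster(**data)
     for operation in operations:
         operation = dao.get_operation_by_id(operation.id)
         self.assertTrue(operation.status in (model.STATUS_STARTED, model.STATUS_FINISHED),
                         "Operation was not submitted to the cluster.")
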