Code example #1
    def test_launch_two_ops_HDD_with_space(self):
        """
        Launch two operations and give the user enough available space so that both finish.
        """
        module = "tvb_test.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TVBSettings.MAX_DISK_SPACE = 2 * float(adapter.get_required_disk_size(**data))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
        # Now update the maximum disk size to the size of the previously produced datatype (converted from kB to MB)
        # plus what the next operation is estimated to require (converted from B to MB)
        TVBSettings.MAX_DISK_SPACE = float(datatype.disk_size) + float(adapter.get_required_disk_size(**data))

        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
        self.assertEqual(len(dts), 2)
        datatype = dao.get_datatype_by_id(dts[1][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Code example #2
    def test_launch_two_ops_HDD_full_space(self):
        """
        Launch two operations and give the user enough available space so that the first finishes,
        but after updating the user's HDD limit the second should not.
        """
        module = "tvb.tests.framework.adapters.testadapter3"
        class_name = "TestAdapterHDDRequired"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test": 100}
        TvbProfile.current.MAX_DISK_SPACE = (1 + float(adapter.get_required_disk_size(**data)))
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                  tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")
        # Now update the maximum disk size to be less than the size of the previously produced datatype (converted from kB to MB)
        # plus what the next operation is estimated to require (converted from B to MB)
        TvbProfile.current.MAX_DISK_SPACE = float(datatype.disk_size - 1) + \
                                                float(adapter.get_required_disk_size(**data) - 1)

        self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                          self.test_project.id, adapter,
                          tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
        dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
        self.assertEqual(len(dts), 1)
Code example #4
 def test_launch_operation_HDD_full_space_started_ops(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     space_taken_by_started = 100
     module = "tvb_test.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     started_operation = model.Operation(
         self.test_user.id,
         self.test_project.id,
         group.id,
         "",
         status=model.STATUS_STARTED,
         result_disk_size=space_taken_by_started)
     dao.store_entity(started_operation)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 100}
     TVBSettings.MAX_DISK_SPACE = float(
         adapter.get_required_disk_size(**data) + space_taken_by_started -
         1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.assertRaises(NoMemoryAvailableException,
                       self.operation_service.initiate_operation,
                       self.test_user,
                       self.test_project.id,
                       adapter,
                       tmp_folder,
                       method_name=ABCAdapter.LAUNCH_METHOD,
                       **data)
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
     self.assertEqual(len(dts), 0)
Code example #5
File: forms.py  Project: yop0/tvb-root
 def _get_values_from_db(self):
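     # Merge the field's static conditions with its dynamic ones, then query the DAO;
     # only the filtered datatype rows are kept, the total count is discarded.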
     all_conditions = FilterChain()
     all_conditions += self.conditions
     all_conditions += self.dynamic_conditions
     filtered_datatypes, count = dao.get_values_of_datatype(
         self.owner.project_id, self.datatype_index, all_conditions)
     return filtered_datatypes
Code example #6
 def test_launch_operation_HDD_with_space(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb_test.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test": 100}
     TVBSettings.MAX_DISK_SPACE = float(
         adapter.get_required_disk_size(**data))
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     self.operation_service.initiate_operation(
         self.test_user,
         self.test_project.id,
         adapter,
         tmp_folder,
         method_name=ABCAdapter.LAUNCH_METHOD,
         **data)
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Code example #7
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     res = self.operation_service.initiate_operation(
         self.test_user, self.test_project.id, adapter, tmp_folder, **data
     )
     self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish")
     group = dao.find_group(module, class_name)
     self.assertEqual(group.module, "tvb.tests.framework.adapters.testadapter1", "Wrong data stored.")
     self.assertEqual(group.classname, "TestAdapter1", "Wrong data stored.")
     dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
     self.assertEqual(count, 1)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Code example #8
def prepare_mapped_sensors_as_measure_points_params(project_id,
                                                    sensors,
                                                    eeg_cap=None):
    """
    Compute sensor positions by mapping them to the ``eeg_cap`` surface.
    If ``eeg_cap`` is not specified, the mapping will use a default EEGCap DataType from the current project.
    If no default EEGCap is found, return the sensors as they are (not projected).

    :returns: dictionary to be used in Viewers for rendering measure_points
    :rtype: dict
    """

    if eeg_cap is None:
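        # Fall back to the EEGCap datatypes stored in the project (at most one row, since
        # page_size=1); column index 2 of a row holds the entity GID used to load it.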
        eeg_cap = dao.get_values_of_datatype(project_id, EEGCap,
                                             page_size=1)[0]
        if eeg_cap:
            eeg_cap = ABCDisplayer.load_entity_by_gid(eeg_cap[-1][2])

    if eeg_cap:
        datatype_kwargs = json.dumps({'surface_to_map': eeg_cap.gid})
        sensor_locations = ABCDisplayer.paths2url(
            sensors, 'sensors_to_surface') + '/' + datatype_kwargs
        sensor_no = sensors.number_of_sensors
        sensor_labels = ABCDisplayer.paths2url(sensors, 'labels')

        return {
            'urlMeasurePoints': sensor_locations,
            'urlMeasurePointsLabels': sensor_labels,
            'noOfMeasurePoints': sensor_no,
            'minMeasure': 0,
            'maxMeasure': sensor_no,
            'urlMeasure': ''
        }

    return prepare_sensors_as_measure_points_params(sensors)
Code example #9
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     adapter = TestFactory.create_adapter(module, class_name)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     res = self.operation_service.initiate_operation(
         self.test_user, self.test_project.id, adapter, tmp_folder, **data)
     self.assertTrue(
         res.index("has finished.") > 10, "Operation didn't finish")
     group = dao.get_algorithm_by_module(module, class_name)
     self.assertEqual(group.module,
                      'tvb.tests.framework.adapters.testadapter1',
                      "Wrong data stored.")
     self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
     dts, count = dao.get_values_of_datatype(self.test_project.id,
                                             Datatype1)
     self.assertEqual(count, 1)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Code example #10
    def _import(self,
                import_file_path=None,
                expected_result_class=StructuralMRI,
                connectivity=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')
        args = {
            'data_file': import_file_path,
            DataTypeMetaData.KEY_SUBJECT: "bla bla",
            'apply_corrections': True,
            'connectivity': connectivity
        }

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        dts, count = dao.get_values_of_datatype(self.test_project.id,
                                                expected_result_class, None)
        self.assertEqual(1, count,
                         "Project should contain only one data type.")

        result = ABCAdapter.load_entity_by_gid(dts[0][2])
        self.assertTrue(result is not None, "Result should not be none")
        return result
Code example #11
    def test_initiate_operation(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        test_adapter_factory()
        adapter = TestFactory.create_adapter(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapter1")
        view_model = TestModel()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        adapter.generic_attributes.subject = "Test4242"

        self.operation_service.initiate_operation(self.test_user,
                                                  self.test_project,
                                                  adapter,
                                                  model_view=view_model)

        dts, count = dao.get_values_of_datatype(self.test_project.id,
                                                DummyDataTypeIndex)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == "Test4242", "Wrong data stored."
        assert datatype.type == adapter.get_output()[0].__name__, "Wrong data stored."
Code example #12
    def test_initiate_operation(self, test_adapter_factory):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        test_adapter_factory()
        adapter = TestFactory.create_adapter(module, class_name)
        output = adapter.get_output()
        output_type = output[0].__name__
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

        view_model = adapter.get_view_model()()
        view_model.test1_val1 = 5
        view_model.test1_val2 = 5
        self.operation_service.initiate_operation(self.test_user, self.test_project, adapter,
                                                  tmp_folder, model_view=view_model)

        group = dao.get_algorithm_by_module(module, class_name)
        assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
        assert group.classname == 'TestAdapter1', "Wrong data stored."
        dts, count = dao.get_values_of_datatype(self.test_project.id, DummyDataTypeIndex)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        assert datatype.type == output_type, "Wrong data stored."
Code example #13
    def _import(self,
                import_file_path=None,
                expected_result_class=StructuralMRIIndex,
                connectivity_gid=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """
        view_model = NIFTIImporterModel()
        view_model.data_file = import_file_path
        view_model.mappings_file = self.TXT_FILE
        view_model.apply_corrections = True
        view_model.connectivity = connectivity_gid
        view_model.data_subject = "Bla Bla"

        TestFactory.launch_importer(NIFTIImporter, view_model, self.test_user,
                                    self.test_project, False)

        dts, count = dao.get_values_of_datatype(self.test_project.id,
                                                expected_result_class, None)
        assert count == 1, "Project should contain only one data type."

        result = load_entity_by_gid(dts[0][2])
        assert result is not None, "Result should not be none"
        return result
Code example #14
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb_test.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project,
                                                   "TEMP")
     res = self.operation_service.initiate_operation(
         self.test_user,
         self.test_project.id,
         adapter,
         tmp_folder,
         method_name=ABCAdapter.LAUNCH_METHOD,
         **data)
     self.assertTrue(
         res.index("has finished.") > 10, "Operation didn't finish")
     group = dao.find_group(module, class_name)
     self.assertEqual(group.module, 'tvb_test.adapters.testadapter1',
                      "Wrong data stored.")
     self.assertEqual(group.classname, 'TestAdapter1', "Wrong data stored.")
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype1)
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT,
                      "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Code example #15
def get_filtered_datatypes(project_id, data_type_cls, filters=None, page_size=50):
    """
    Return all dataTypes that match a given name and some filters.
    :param data_type_cls: either a fully qualified class name or a class object
    """
    if isinstance(data_type_cls, str):
        data_type_cls = get_class_by_name(data_type_cls)
    LOGGER.debug('Filtering:' + str(data_type_cls))
    return dao.get_values_of_datatype(project_id, data_type_cls, filters, page_size)
Code example #16
File: input_tree.py  Project: boegel/tvb-framework
 def _get_available_datatypes(self, project_id, data_type_cls, filters=None):
     """
     Return all dataTypes that match a given name and some filters.
     :param data_type_cls: either a fully qualified class name or a class object
     """
     if isinstance(data_type_cls, basestring):
         data_type_cls = get_class_by_name(data_type_cls)
     self.log.debug('Filtering:' + str(data_type_cls))
     return dao.get_values_of_datatype(project_id, data_type_cls, filters, MAXIMUM_DATA_TYPES_DISPLAYED)
Code example #17
File: load.py  Project: gummadhav/tvb-framework
def get_filtered_datatypes(project_id, data_type_cls, filters=None, page_size=50):
    """
    Return all dataTypes that match a given name and some filters.
    :param data_type_cls: either a fully qualified class name or a class object
    """
    if isinstance(data_type_cls, basestring):
        data_type_cls = get_class_by_name(data_type_cls)
    LOGGER.debug('Filtering:' + str(data_type_cls))
    return dao.get_values_of_datatype(project_id, data_type_cls, filters, page_size)
Code example #18
def _retrieve_entities_by_filters(kind, project_id, filters):
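    # get_values_of_datatype returns a (rows, count) tuple; each row is a named tuple whose
    # first element is the datatype id, used below to load the full entity.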

    named_tuple_array, counter = dao.get_values_of_datatype(project_id, kind, filters)
    print("Found " + str(counter) + " entities of type " + str(kind))

    result = []
    for named_tuple in named_tuple_array:
        dt_id = named_tuple[0]
        result.append(dao.get_generic_entity(kind, dt_id)[0])

    return result
Code example #19
File: flowservice.py  Project: wvangeit/framework_tvb
 def get_available_datatypes(self, project_id, data_name, filters=None):
     """
     Return all dataTypes that match a given name and some filters.
     """
     data_class = FilterChain._get_class_instance(data_name)
     if data_class is None:
         self.logger.warning("Invalid Class specification:" + str(data_name))
         return []
     else:
         self.logger.debug('Filtering:' + str(data_class))
         return dao.get_values_of_datatype(project_id, data_class, filters)
Code example #20
 def get_available_datatypes(self, project_id, data_name, filters=None):
     """
     Return all dataTypes that match a given name and some filters.
     """
     data_class = FilterChain._get_class_instance(data_name)
     if data_class is None:
         self.logger.warning("Invalid Class specification:" + str(data_name))
         return [], 0
     else:
         self.logger.debug('Filtering:' + str(data_class))
         return dao.get_values_of_datatype(project_id, data_class, filters, self.MAXIMUM_DATA_TYPES_DISPLAYED)
Code example #21
def _retrieve_entities_by_filters(kind, project_id, filters):

    named_tuple_array, counter = dao.get_values_of_datatype(project_id, kind, filters)
    print "Found " + str(counter) + " entities of type " + str(kind)

    result = []
    for named_tuple in named_tuple_array:
        dt_id = named_tuple[0]
        result.append(dao.get_generic_entity(kind, dt_id)[0])

    return result
Code example #22
File: brain.py  Project: unimauro/tvb-framework
    def get_shell_surface_urls(shell_surface=None, project_id=0):

        if shell_surface is None:
            shell_surface = dao.get_values_of_datatype(project_id, FaceSurface)[0]

            if not shell_surface:
                raise Exception('No face object found in database.')

            shell_surface = ABCDisplayer.load_entity_by_gid(shell_surface[0][2])

        face_vertices, face_normals, _, face_triangles = shell_surface.get_urls_for_rendering()
        return json.dumps([face_vertices, face_normals, face_triangles])
Code example #23
File: input_tree.py  Project: roxanast/tvb-framework
 def _get_available_datatypes(self,
                              project_id,
                              data_type_cls,
                              filters=None):
     """
     Return all dataTypes that match a given name and some filters.
     :param data_type_cls: either a fully qualified class name or a class object
     """
     if isinstance(data_type_cls, basestring):
         data_type_cls = get_class_by_name(data_type_cls)
     self.log.debug('Filtering:' + str(data_type_cls))
     return dao.get_values_of_datatype(project_id, data_type_cls, filters,
                                       MAXIMUM_DATA_TYPES_DISPLAYED)
Code example #24
def prepare_shell_surface_urls(project_id, shell_surface=None):

    if shell_surface is None:
        shell_surface = dao.get_values_of_datatype(project_id,
                                                   FaceSurface,
                                                   page_size=1)[0]

        if not shell_surface:
            raise Exception('No Face object found in current project.')

        shell_surface = ABCDisplayer.load_entity_by_gid(shell_surface[0][2])

    face_vertices, face_normals, _, face_triangles = shell_surface.get_urls_for_rendering()
    return json.dumps([face_vertices, face_normals, face_triangles])
Code example #25
 def test_launch_operation_HDD_full_space(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) - 1)
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     self.assertRaises(NoMemoryAvailableException, self.operation_service.initiate_operation, self.test_user,
                       self.test_project.id, adapter,
                       tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
     self.assertEqual(len(dts), 0)
Code example #27
    def _import(self,
                import_file_path=None,
                expected_result_class=StructuralMRIIndex,
                connectivity_gid=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        # Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')

        form = NIFTIImporterForm()
        form.fill_from_post({
            'data_file': Part(import_file_path, HeaderMap({}), ''),
            'apply_corrections': 'True',
            'connectivity': connectivity_gid,
            'mappings_file': Part(self.TXT_FILE, HeaderMap({}), ''),
            'Data_Subject': 'bla bla'
        })
        form.data_file.data = import_file_path
        form.mappings_file.data = self.TXT_FILE
        view_model = form.get_view_model()()
        view_model.data_subject = 'bla bla'
        form.fill_trait(view_model)
        importer.submit_form(form)

        # Launch import Operation
        FlowService().fire_operation(importer,
                                     self.test_user,
                                     self.test_project.id,
                                     view_model=view_model)

        dts, count = dao.get_values_of_datatype(self.test_project.id,
                                                expected_result_class, None)
        assert count == 1, "Project should contain only one data type."

        result = ABCAdapter.load_entity_by_gid(dts[0][2])
        assert result is not None, "Result should not be none"
        return result
Code example #28
 def fill_selectfield_with_datatypes(self,
                                     field,
                                     project_id,
                                     extra_conditions=None):
     # type: (TraitDataTypeSelectField, int, list) -> None
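     # Combine the field's own filter conditions with any extra ones, fetch the matching
     # datatypes for the project, and expose them as (datatype, display name) options.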
     filtering_conditions = FilterChain()
     filtering_conditions += field.conditions
     filtering_conditions += extra_conditions
     datatypes, _ = dao.get_values_of_datatype(project_id,
                                               field.datatype_index,
                                               filtering_conditions)
     datatype_options = []
     for datatype in datatypes:
         display_name = self._prepare_dt_display_name(
             field.datatype_index, datatype)
         datatype_options.append((datatype, display_name))
     field.datatype_options = datatype_options
Code example #29
 def test_launch_operation_HDD_with_space(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                               tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Code example #30
    def _import(self, import_file_path=None, expected_result_class=StructuralMRI, connectivity=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance 
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter')
        args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: "bla bla",
                'apply_corrections': True, 'connectivity': connectivity}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        dts, count = dao.get_values_of_datatype(self.test_project.id, expected_result_class, None)
        self.assertEqual(1, count, "Project should contain only one data type.")

        result = ABCAdapter.load_entity_by_gid(dts[0][2])
        self.assertTrue(result is not None, "Result should not be none")
        return result
Code example #31
File: brain.py  Project: unimauro/tvb-framework
    def compute_sensor_surfacemapped_measure_points(project_id, sensors, eeg_cap=None):
        """
        Compute sensor positions by mapping them to the ``eeg_cap`` surface.
        If ``eeg_cap`` is not specified, the mapping will use a default.
        It returns a URL from where to fetch the positions.
        If no default is available, it returns None.
        :returns: measure points, measure points labels, measure points number
        :rtype: tuple
        """

        if eeg_cap is None:
            eeg_cap = dao.get_values_of_datatype(project_id, EEGCap)[0]
            if eeg_cap:
                eeg_cap = ABCDisplayer.load_entity_by_gid(eeg_cap[-1][2])

        if eeg_cap:
            datatype_kwargs = json.dumps({'surface_to_map': eeg_cap.gid})
            measure_points = ABCDisplayer.paths2url(sensors, 'sensors_to_surface') + '/' + datatype_kwargs
            measure_points_no = sensors.number_of_sensors
            measure_points_labels = ABCDisplayer.paths2url(sensors, 'labels')
            return measure_points, measure_points_labels, measure_points_no
Code example #32
 def test_initiate_operation(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     adapter = TestFactory.create_adapter(module, class_name)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test1_val1": 5, "test1_val2": 5}
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     res = self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                     tmp_folder, **data)
     assert res.index("has finished.") > 10, "Operation didn't finish"
     group = dao.get_algorithm_by_module(module, class_name)
     assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
     assert group.classname == 'TestAdapter1', "Wrong data stored."
     dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
     assert count == 1
     assert len(dts) == 1
     datatype = dao.get_datatype_by_id(dts[0][0])
     assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
     assert datatype.type == output_type, "Wrong data stored."
Code example #33
 def test_launch_operation_HDD_with_space_started_ops(self):
     """
     Test the actual operation flow by executing a test adapter.
     """
     space_taken_by_started = 100
     module = "tvb.tests.framework.adapters.testadapter3"
     class_name = "TestAdapterHDDRequired"
     group = dao.find_group(module, class_name)
     started_operation = model.Operation(self.test_user.id, self.test_project.id, group.id, "",
                                         status=model.STATUS_STARTED, estimated_disk_size=space_taken_by_started)
     dao.store_entity(started_operation)
     adapter = FlowService().build_adapter_instance(group)
     output = adapter.get_output()
     output_type = output[0].__name__
     data = {"test": 100}
     TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data) + space_taken_by_started)
     tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
     self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                               tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD, **data)
     dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)[0]
     self.assertEqual(len(dts), 1)
     datatype = dao.get_datatype_by_id(dts[0][0])
     self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
     self.assertEqual(datatype.type, output_type, "Wrong data stored.")
Code example #34
 def _get_values_from_db(self):
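     # Query the DAO with only the field's static conditions; the returned count is ignored.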
     filtered_datatypes, count = dao.get_values_of_datatype(
         self.owner.project_id, self.datatype_index, self.conditions)
     return filtered_datatypes