Example #1
0
 def __create_operation(self):
     """
     Create and persist an Operation entity for the simulator algorithm.

     :return: tuple of (stored operation, algorithm id, operation storage path)
     """
     # Minimal metadata the framework expects: a subject and a data state.
     meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe", DataTypeMetaData.KEY_STATE: "RAW_DATA"}
     algorithm = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
     # Parameters are an empty JSON string; status STARTED marks it as running.
     operation = model.Operation(self.user.id, self.project.id, algorithm.id, json.dumps(''), meta=json.dumps(meta),
                                 status=model.STATUS_STARTED)
     operation = dao.store_entity(operation)
     storage_path = FilesHelper().get_project_folder(self.project, str(operation.id))
     return operation, algorithm.id, storage_path
Example #2
0
 def test_zip_import(self):
     """
     Import a zip archive of TVB data (multiple data types exported from a
     group in one file) and check that all of them reach the project.
     """
     self._import(self.zip_file_path)
     full_type = self.datatype.module + "." + self.datatype.type
     count = FlowService().get_available_datatypes(self.test_project.id, full_type)[1]
     self.assertEqual(9, count,
                      "9 datatypes should have been imported from group.")
Example #3
0
def import_conn_zip(project_id, zip_path):
    """Upload a connectivity zip into the given project via the ZIP importer."""
    project = dao.get_project_by_id(project_id)
    algo = dao.get_algorithm_by_module(
        'tvb.adapters.uploaders.zip_connectivity_importer',
        'ZIPConnectivityImporter')
    adapter = ABCAdapter.build_adapter(algo)
    # Fire the upload operation on behalf of the project administrator.
    FlowService().fire_operation(adapter, project.administrator, project_id,
                                 uploaded=zip_path)
Example #4
0
    def transactional_setup_method(self):
        """Create the user, project and valid uploader algorithm used by the tests."""
        self.flow_service = FlowService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(admin=self.test_user)

        uploader_category = dao.get_uploader_categories()[0]
        algorithm_entity = model_operation.Algorithm(TEST_ADAPTER_VALID_MODULE,
                                                     TEST_ADAPTER_VALID_CLASS,
                                                     uploader_category.id)
        self.algorithm = dao.store_entity(algorithm_entity)
Example #5
0
def import_conn_zip(project_id, zip_path):
    """Import a connectivity zip, building the importer adapter from its class."""
    project = dao.get_project_by_id(project_id)

    adapter = ABCAdapter.build_adapter_from_class(ZIPConnectivityImporter)
    # Fill the importer form with the uploaded file path before launching.
    form = ZIPConnectivityImporterForm()
    form.uploaded.data = zip_path
    adapter.submit_form(form)

    FlowService().fire_operation(adapter, project.administrator, project_id,
                                 _uploaded=zip_path)
Example #6
0
    def get_entity(project, expected_data, filters=None):
        """
        Return the first entity with class given by `expected_data`

        :param expected_data: specifies the class whose entity is returned
        """
        full_type = expected_data.module + "." + expected_data.type
        matching = FlowService().get_available_datatypes(project.id, full_type,
                                                         filters)[0]
        return ABCAdapter.load_entity_by_gid(matching[0][2])
    def __init__(self):
        """
        Build the navigation submenus (connectivity and burst) exposed by this
        controller, resolving algorithm ids from the flow service.
        """
        self.logger = get_logger(self.__class__.__module__)

        self.user_service = UserService()
        self.flow_service = FlowService()

        # Analyzer adapters are resolved lazily; only the link is fixed here.
        self.analyze_category_link = '/flow/step_analyzers'
        self.analyze_adapters = None

        self.connectivity_tab_link = '/flow/step_connectivity'
        view_category = self.flow_service.get_visualisers_category()
        conn_id = self.flow_service.get_algorithm_by_module_and_class(
            CONNECTIVITY_MODULE, CONNECTIVITY_CLASS).id
        connectivity_link = self.get_url_adapter(view_category.id, conn_id)

        self.connectivity_submenu = [
            dict(title="Large Scale Connectivity",
                 link=connectivity_link,
                 subsection=WebStructure.SUB_SECTION_CONNECTIVITY,
                 description=
                 "View Connectivity Regions. Perform Connectivity lesions"),
            dict(title="Local Connectivity",
                 link='/spatial/localconnectivity/step_1/1',
                 subsection=WebStructure.SUB_SECTION_LOCAL_CONNECTIVITY,
                 description=
                 "Create or view existent Local Connectivity entities.")
        ]

        allen_algo = self.flow_service.get_algorithm_by_module_and_class(
            ALLEN_CREATOR_MODULE, ALLEN_CREATOR_CLASS)
        if allen_algo and not allen_algo.removed:
            # Only add the Allen Creator if AllenSDK is installed
            allen_link = self.get_url_adapter(allen_algo.fk_category,
                                              allen_algo.id)
            self.connectivity_submenu.append(
                dict(
                    title="Allen Connectome Builder",
                    link=allen_link,
                    subsection=WebStructure.SUB_SECTION_ALLEN,
                    description=
                    "Download data from Allen dataset and create a mouse connectome"
                ))

        # Burst (simulation) submenu entries are static links.
        self.burst_submenu = [
            dict(link='/burst',
                 subsection=WebStructure.SUB_SECTION_BURST,
                 title='Simulation Cockpit',
                 description='Manage simulations'),
            dict(link='/burst/dynamic',
                 subsection='dynamic',
                 title='Phase plane',
                 description='Configure model dynamics')
        ]
Example #8
0
    def _import(self, import_file_name):
        """Launch the connectivity-measure importer on a file from the test data folder."""
        adapter = TestFactory.create_adapter('tvb.adapters.uploaders.connectivity_measure_importer',
                                             'ConnectivityMeasureImporter')
        file_path = os.path.join(os.path.dirname(test_data.__file__), import_file_name)

        launch_args = {'data_file': file_path,
                       'connectivity': self.connectivity.gid,
                       DataTypeMetaData.KEY_SUBJECT: "John"}

        # Launch the import operation in the test project.
        FlowService().fire_operation(adapter, self.test_user, self.test_project.id, **launch_args)
Example #9
0
    def _run_cff_importer(self, cff_path):
        """Fire the CFF importer operation for the given file path."""
        adapter = TestFactory.create_adapter('tvb.adapters.uploaders.cff_importer',
                                             'CFF_Importer')
        launch_args = {'cff': cff_path,
                       DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}

        # Launch the import operation in the test project.
        FlowService().fire_operation(adapter, self.test_user, self.test_project.id,
                                     **launch_args)
Example #10
0
    def import_projection_matrix(user, project, file_path, sensors_gid,
                                 surface_gid):
        """
        Import a projection matrix file and return the resulting entity.

        :param file_path: absolute path of the projection matrix file
        :param sensors_gid: gid of the sensors the projection maps from
        :param surface_gid: gid of the surface the projection maps onto
        :return: the imported ProjectionMatrix entity
        """
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.projection_matrix_importer',
            'ProjectionMatrixSurfaceEEGImporter')

        form = ProjectionMatrixImporterForm()

        # Mimic an HTTP multipart POST: file fields are wrapped in Part objects.
        form.fill_from_post({
            'projection_file':
            Part(file_path, HeaderMap({}), ''),
            'dataset_name':
            'ProjectionMatrix',
            'sensors':
            sensors_gid,
            'surface':
            surface_gid,
            'Data_Subject':
            'John Doe'
        })
        # Overwrite the Part with the raw path so the importer reads from disk.
        form.projection_file.data = file_path
        view_model = form.get_view_model()()
        form.fill_trait(view_model)
        importer.submit_form(form)

        FlowService().fire_operation(importer,
                                     user,
                                     project.id,
                                     view_model=view_model)

        data_types = FlowService().get_available_datatypes(
            project.id, ProjectionMatrixIndex)[0]
        assert 1 == len(
            data_types
        ), "Project should contain only one data type = Projection Matrix."

        projection_matrix = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert projection_matrix is not None, "Projection Matrix instance should not be none"

        return projection_matrix
Example #11
0
    def import_surface_gifti(user, project, path):
        """
        Import a surface in GIFTI format and return the resulting entity.

        :param path: absolute path of the file to be imported
        :return: the imported surface entity
        """
        # Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.gifti_surface_importer',
            'GIFTISurfaceImporter')

        form = GIFTISurfaceImporterForm()
        form.fill_from_post({
            'file_type': form.get_view_model().KEY_OPTION_READ_METADATA,
            'data_file': Part(path, HeaderMap({}), ''),
            'data_file_part2': Part('', HeaderMap({}), ''),
            'should_center': 'False',
            'Data_Subject': 'John Doe',
        })
        form.data_file.data = path
        view_model = form.get_view_model()()
        form.fill_trait(view_model)
        importer.submit_form(form)

        # Launch import Operation
        FlowService().fire_operation(importer,
                                     user,
                                     project.id,
                                     view_model=view_model)

        surface_class = CorticalSurface
        data_types = FlowService().get_available_datatypes(
            project.id, surface_class.__module__ + "." + surface_class.__name__)[0]
        # BUG FIX: `assert 1, len(...) == "msg"` asserted the constant 1 and always
        # passed; assert the actual count with the text as the assertion message.
        assert len(data_types) == 1, "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        # BUG FIX: `surface is not None == "msg"` chained into `None == "msg"`
        # (always False); use a proper assertion message instead.
        assert surface is not None, "Surface should not be none"

        return surface
Example #12
0
    def import_surface_zip(user, project, zip_path, surface_type, zero_based):
        """Launch the ZIP surface importer with the given arguments."""
        adapter = TestFactory.create_adapter(
            'tvb.adapters.uploaders.zip_surface_importer',
            'ZIPSurfaceImporter')
        launch_args = {'uploaded': zip_path,
                       'surface_type': surface_type,
                       'zero_based_triangles': zero_based}

        # Launch the import operation.
        FlowService().fire_operation(adapter, user, project.id, **launch_args)
    def _run_cff_importer(self, cff_path):
        """Build the CFF importer from its algorithm group and fire the operation."""
        algo_group = dao.find_group('tvb.adapters.uploaders.cff_importer',
                                    'CFF_Importer')
        adapter = ABCAdapter.build_adapter(algo_group)
        launch_args = {'cff': cff_path,
                       DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}

        # Launch the import operation in the test project.
        FlowService().fire_operation(adapter, self.test_user, self.test_project.id,
                                     **launch_args)
Example #14
0
    def _import(self, import_file_path=None):
        """
        Import a file in TVB format.

        :param import_file_path: absolute path of the file to be imported
        """
        adapter = TestFactory.create_adapter(
            'tvb.adapters.uploaders.tvb_importer', 'TVBImporter')

        # Launch the import operation in the test project.
        FlowService().fire_operation(adapter, self.test_user, self.test_project.id,
                                     data_file=import_file_path)
    def import_surface_zip(user, project, zip_path, surface_type, zero_based):
        """Build the ZIP surface importer from its group and launch it."""
        algo_group = dao.find_group('tvb.adapters.uploaders.zip_surface_importer',
                                    'ZIPSurfaceImporter')
        adapter = ABCAdapter.build_adapter(algo_group)

        # Launch the import operation with the surface parameters.
        FlowService().fire_operation(adapter, user, project.id,
                                     uploaded=zip_path,
                                     surface_type=surface_type,
                                     zero_based_triangles=zero_based)
 def setUp(self):
     """
     Prepare the testing environment: instantiate the workflow, burst,
     operation and flow services, then create a test user and project.
     """
     self.workflow_service = WorkflowService()
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.flow_service = FlowService()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
Example #17
0
 def _launch_test_algo_on_cluster(self, **data):
     """
     Prepare operations for TestAdapter1 and submit them to the cluster backend.

     :param data: launch arguments forwarded to prepare_operations
     :return: the list of prepared operations
     """
     module = "tvb.tests.framework.adapters.testadapter1"
     class_name = "TestAdapter1"
     group = dao.find_group(module, class_name)
     adapter = FlowService().build_adapter_instance(group)
     algo_group = adapter.algorithm_group
     algo_category = dao.get_category_by_id(algo_group.fk_category)
     algo = dao.get_algorithm_by_group(algo_group.id)
     operations, _ = self.operation_service.prepare_operations(
         self.test_user.id, self.test_project.id, algo, algo_category, {},
         **data)
     # Send to the cluster instead of launching locally (private API on purpose).
     self.operation_service._send_to_cluster(operations, adapter)
     return operations
    def setUp(self):
        """
        Prepare before each test: services, a fresh user/project and the
        relevant/full datatype filters used by the assertions.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.FULL_VIEW)
Example #19
0
    def test_h5_import(self, transactional_setup_fixture):
        """
        Test import of TVB data in h5 format (a single data type per import).
        """
        self._import(self.h5_file_path)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           self.datatype.module + "." + self.datatype.type)[0]
        # BUG FIX: `assert 1, len(...) == "msg"` asserted the constant 1 and
        # always passed; assert the real condition with a proper message.
        assert len(data_types) == 1, "Project should contain only one data type."

        data_type_entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert data_type_entity is not None, "Datatype should not be none"
        # BUG FIX: previous form asserted `self.datatype.gid` (truthy) and always
        # passed; compare the two gids instead.
        assert self.datatype.gid == data_type_entity.gid, "Imported datatype should have the same gid"
    def _import(self, import_file_path, sensors_type, expected_data):
        """
        Import a sensors file and return the resulting entity.

        :param import_file_path: absolute path of the file to be imported
        """
        # Retrieve Adapter instance
        adapter = TestFactory.create_adapter('tvb.adapters.uploaders.sensors_importer', 'Sensors_Importer')

        # Launch the import operation.
        FlowService().fire_operation(adapter, self.test_user, self.test_project.id,
                                     sensors_file=import_file_path, sensors_type=sensors_type)

        full_type = expected_data.module + "." + expected_data.type
        data_types = FlowService().get_available_datatypes(self.test_project.id, full_type)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type = Sensors.")

        sensors_entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(sensors_entity is not None, "Sensors instance should not be none")

        return sensors_entity
Example #21
0
    def import_surface_zip(user,
                           project,
                           zip_path,
                           surface_type,
                           zero_based='True'):
        """
        Import a zipped surface and return the resulting entity.

        :param zip_path: absolute path of the zip file to be imported
        :param zero_based: string flag for zero-based triangle indexing
        """
        # Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.zip_surface_importer',
            'ZIPSurfaceImporter')

        form = ZIPSurfaceImporterForm()
        form.fill_from_post({
            'uploaded': Part(zip_path, HeaderMap({}), ''),
            'zero_based_triangles': zero_based,
            'should_center': 'True',
            'surface_type': surface_type,
            'Data_Subject': 'John Doe'
        })
        form.uploaded.data = zip_path
        view_model = form.get_view_model()()
        form.fill_trait(view_model)
        importer.submit_form(form)

        # Launch import Operation
        FlowService().fire_operation(importer,
                                     user,
                                     project.id,
                                     view_model=view_model)

        data_types = FlowService().get_available_datatypes(
            project.id, SurfaceIndex)[0]
        # BUG FIX: `assert 1, len(...) == "msg"` asserted the constant 1 and
        # always passed; assert the actual count.
        assert len(data_types) == 1, "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        # BUG FIX: check for None before touching attributes on the entity.
        assert surface is not None, "Surface should not be None"
        surface.user_tag_3 = ''
        return surface
Example #22
0
    def import_surface_obj(user, project, obj_path, surface_type):
        """
        Import a surface in OBJ format and return the resulting entity.

        :param obj_path: absolute path of the OBJ file to be imported
        """
        # Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')

        form = ObjSurfaceImporterForm()
        form.fill_from_post({'_data_file': Part(obj_path, HeaderMap({}), ''),
                             '_surface_type': surface_type,
                             '_Data_Subject': 'John Doe'
                             })
        form.data_file.data = obj_path
        view_model = form.get_view_model()()
        form.fill_trait(view_model)
        importer.submit_form(form)

        # Launch import Operation
        FlowService().fire_operation(importer, user, project.id, view_model=view_model)

        data_types = FlowService().get_available_datatypes(project.id, SurfaceIndex)[0]
        # BUG FIX: `assert 1, len(...) == "msg"` asserted the constant 1 and
        # always passed; assert the actual count.
        assert len(data_types) == 1, "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        assert surface is not None, "Surface should not be None"
        return surface
Example #23
0
    def _importSurface(self, import_file_path=None):
        """
        Import a surface in GIFTI format and return the resulting entity.

        :param import_file_path: absolute path of the file to be imported
        """
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.gifti_surface_importer', 'GIFTISurfaceImporter')

        args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: ""}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

        surface = CorticalSurface()
        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           surface.module + "." + surface.type)[0]
        # BUG FIX: `assert 1, len(...) == "msg"` asserted the constant 1 and
        # always passed; assert the actual count.
        assert len(data_types) == 1, "Project should contain only one data type."

        surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        # BUG FIX: `surface is not None == "msg"` chained into `None == "msg"`
        # (always False); use a proper assertion message instead.
        assert surface is not None, "Surface should not be none"

        return surface
Example #24
0
def fire_simulation(project_id, simulator):
    """Launch the given Simulator configuration as an operation in the project."""
    project = dao.get_project_by_id(project_id)
    assert isinstance(simulator, Simulator)

    # Load the SimulatorAdapter algorithm from DB
    simulator_algorithm = FlowService().get_algorithm_by_module_and_class(
        IntrospectionRegistry.SIMULATOR_MODULE, IntrospectionRegistry.SIMULATOR_CLASS)

    # Instantiate a SimulatorService and launch the configured simulation
    service = SimulatorService()
    operation = service.async_launch_and_prepare_simulation(
        None, project.administrator, project, simulator_algorithm, simulator, None)
    LOG.info("Operation launched ....")
    return operation
 def __init__(self, overwrites=None, settings_file=None):
     """
     Build the launcher configuration.

     Parameters can be overwritten either from a settings file or from a dictionary.

     :param overwrites: optional dict merged into the overwrites mapping
     :param settings_file: optional path to a file of KEY=VALUE lines
     :raises Exception: when no project id ends up configured
     """
     if overwrites is not None:
         self.overwrites.update(overwrites)
     if settings_file is not None:
         # BUG FIX: previously read sys.argv[1] instead of the settings_file
         # argument, and leaked the open file handle.
         with open(settings_file) as settings_source:
             settings = settings_source.read()
         for line in settings.split('\n'):
             # Skip blank lines so a trailing newline does not crash the split.
             if not line.strip():
                 continue
             key, value = line.split('=')
             self.overwrites[key.strip()] = value.strip()
     if KEY_PROJECT not in self.overwrites:
         raise Exception("Settings file should contain the id of the project: %s=1" % KEY_PROJECT)
     self.project = dao.get_project_by_id(self.overwrites[KEY_PROJECT])
     self.flow_service = FlowService()
     self.operation_service = OperationService()
Example #26
0
    def _import_connectivity(self):
        """Upload the test connectivity zip and cache the resulting entity on self."""
        adapter = TestFactory.create_adapter(
            'tvb.adapters.uploaders.zip_connectivity_importer',
            'ZIPConnectivityImporter')

        # Launch the upload operation.
        FlowService().fire_operation(adapter, self.test_user, self.test_project.id,
                                     uploaded=self.connectivity_path,
                                     Data_Subject='QL')

        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
Example #27
0
    def _importSurface(self, import_file_path=None):
        """Import a face surface from an OBJ file and return the entity."""
        adapter = TestFactory.create_adapter(
            'tvb.adapters.uploaders.obj_importer', 'ObjSurfaceImporter')

        launch_args = {'data_file': import_file_path,
                       "surface_type": FACE,
                       DataTypeMetaData.KEY_SUBJECT: "John"}

        # Launch the import operation.
        FlowService().fire_operation(adapter, self.test_user, self.test_project.id,
                                     **launch_args)

        data_types = FlowService().get_available_datatypes(self.test_project.id,
                                                           FaceSurface)[0]
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        face_surface = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(face_surface is not None, "Surface should not be None")
        return face_surface
Example #28
0
    def _compute_connectivity_global_params(self, connectivity):
        """
        Returns a dictionary which contains the data needed for drawing a connectivity.

        :param connectivity: the `Connectivity(HasTraits)` object
        :return: tuple (template parameters dict, pages dict for the control page)
        """
        conn_gid = connectivity.gid.hex
        # Build URLs through which the client fetches each data array.
        path_weights = SurfaceURLGenerator.paths2url(conn_gid,
                                                     'ordered_weights')
        path_pos = SurfaceURLGenerator.paths2url(conn_gid, 'ordered_centres')
        path_tracts = SurfaceURLGenerator.paths2url(conn_gid, 'ordered_tracts')
        path_labels = SurfaceURLGenerator.paths2url(conn_gid, 'ordered_labels')
        path_hemisphere_order_indices = SurfaceURLGenerator.paths2url(
            conn_gid, 'hemisphere_order_indices')

        # The submit URL targets the connectivity-creator algorithm.
        algo = FlowService().get_algorithm_by_module_and_class(
            CONNECTIVITY_CREATOR_MODULE, CONNECTIVITY_CREATOR_CLASS)
        submit_url = '/{}/{}/{}'.format(SurfaceURLGenerator.FLOW,
                                        algo.fk_category, algo.id)
        global_pages = dict(controlPage="connectivity/top_right_controls")

        # Extrema for weights and tracts drive the client-side color scales.
        minimum, maximum, minimum_non_zero = self._compute_matrix_extrema(
            connectivity.ordered_weights)
        minimum_t, maximum_t, minimum_non_zero_t = self._compute_matrix_extrema(
            connectivity.ordered_tracts)

        global_params = dict(
            urlWeights=path_weights,
            urlPositions=path_pos,
            urlTracts=path_tracts,
            urlLabels=path_labels,
            originalConnectivity=conn_gid,
            title="Connectivity Control",
            submitURL=submit_url,
            positions=connectivity.ordered_centres,
            tractsMin=minimum_t,
            tractsMax=maximum_t,
            weightsMin=minimum,
            weightsMax=maximum,
            tractsNonZeroMin=minimum_non_zero_t,
            weightsNonZeroMin=minimum_non_zero,
            pointsLabels=connectivity.ordered_labels,
            # Fall back to 1 when speed is falsy (0/None) to avoid a zero speed.
            conductionSpeed=connectivity.speed or 1,
            connectivity_entity=connectivity,
            base_selection=connectivity.saved_selection_labels,
            hemisphereOrderUrl=path_hemisphere_order_indices)
        global_params.update(
            self.build_params_for_selectable_connectivity(connectivity))
        return global_params, global_pages
Example #29
0
    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correct on the dataTypes resulted from the same operation group.
        """
        flow_service = FlowService()

        # Sanity check: the project starts with no operations.
        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        self.assertEqual(len(all_operations), 0,
                         "There should be no operation")

        algogroup = dao.find_group('tvb.tests.framework.adapters.testadapter3',
                                   'TestAdapter3')
        group, _ = flow_service.prepare_adapter(self.test_project.id,
                                                algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        # A ranged parameter makes fire_operation create a group of 2 operations.
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user,
                                    self.test_project.id, **data)

        all_operations = dao.get_filtered_operations(self.test_project.id,
                                                     None)
        self.assertEqual(len(all_operations), 1,
                         "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2,
                         "Expected 2 operations in group")

        operation_group_id = all_operations[0][3]
        self.assertNotEquals(operation_group_id, None,
                             "The operation should be part of a group.")

        # Stop, then launch synchronously, so results exist before we check them.
        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)

        resulted_datatypes = dao.get_datatype_in_group(
            operation_group_id=operation_group_id)
        self.assertTrue(
            len(resulted_datatypes) >= 2,
            "Expected at least 2, but: " + str(len(resulted_datatypes)))

        # Every datatype of the group must point at the same DataTypeGroup.
        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(
            operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id,
                         "DataTypeGroup is incorrect")
 def __init__(self):
     """Initialise the controller and build the stimulus submenu entries."""
     BaseController.__init__(self)
     self.flow_service = FlowService()
     self.logger = get_logger(__name__)
     region_entry = dict(link='/spatial/stimulus/region/step_1_submit/1/1',
                         title='Region Stimulus',
                         subsection='regionstim',
                         description='Create a new Stimulus on Region level')
     surface_entry = dict(link='/spatial/stimulus/surface/step_1_submit/1/1',
                          title='Surface Stimulus',
                          subsection='surfacestim',
                          description='Create a new Stimulus on Surface level')
     self.submenu_list = [region_entry, surface_entry]