def test_workflow_dynamic_params(self):
        """
        A simple test just for the fact that dynamic parameters are passed properly
        between two workflow steps: 
                  step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
                  step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance. 
        The second adapter has this passed as a dynamic workflow parameter.
        We check that the steps actually ran by verifying that two operations 
        are created and that two dataTypes are stored.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=1,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter3",
                                                               "TestAdapter3", step_index=2,
                                                               dynamic_kwargs={
                                                                   "test": {wf_cfg.DATATYPE_INDEX_KEY: 0,
                                                                            wf_cfg.STEP_INDEX_KEY: 1}})]

        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        self.assertTrue(len(stored_datatypes) == 3, "DataTypes from all steps were not stored.")
        for result_row in stored_datatypes:
            self.assertTrue(result_row.type in ['Datatype1', 'Datatype2'], "Wrong type was stored.")

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        self.assertEqual(finished, 3, "Didn't start operations for both adapters in workflow.")
        self.assertEqual(started, 0, "Some operations from workflow didn't finish.")
        self.assertEqual(error, 0, "Some operations finished with error status.")
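The dynamic parameter above is only a reference (the producing step's index plus the index of that step's output), which the workflow engine resolves into a concrete DataType at launch time. Below is a minimal, framework-free sketch of that resolution, assuming the wf_cfg constants are plain string keys; it is illustrative only, not TVB's actual implementation.

STEP_INDEX_KEY = "step_index"          # assumed value of wf_cfg.STEP_INDEX_KEY
DATATYPE_INDEX_KEY = "datatype_index"  # assumed value of wf_cfg.DATATYPE_INDEX_KEY

def resolve_dynamic_kwargs(dynamic_kwargs, step_results):
    """step_results maps a step index to the list of GIDs that step produced."""
    resolved = {}
    for name, reference in dynamic_kwargs.items():
        produced = step_results[reference[STEP_INDEX_KEY]]
        resolved[name] = produced[reference[DATATYPE_INDEX_KEY]]
    return resolved

# Step 1 (TestAdapter1) produced a single Datatype1 instance with this GID:
step_results = {1: ["gid-of-datatype1"]}
dynamic = {"test": {STEP_INDEX_KEY: 1, DATATYPE_INDEX_KEY: 0}}
assert resolve_dynamic_kwargs(dynamic, step_results) == {"test": "gid-of-datatype1"}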
Example No. 2
    def test_happy_flow_region_import(self):
        """
        Verifies the happy flow for importing a region.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project,
                                                       ProjectionRegionEEG())
        group = dao.find_group(
            'tvb.adapters.uploaders.projection_matrix_importer',
            'ProjectionMatrixRegionEEGImporter')
        importer = ABCAdapter.build_adapter(group)

        zip_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)),
            'region_conn_74_eeg_1020_62.mat')
        args = {
            'projection_file': zip_path,
            'dataset_name': 'ProjectionMatrix',
            'connectivity': self.connectivity.gid,
            'sensors': self.sensors.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT
        }

        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)
        dt_count_after = TestFactory.get_entity_count(self.test_project,
                                                      ProjectionRegionEEG())

        self.assertEqual(dt_count_before + 1, dt_count_after)
Example No. 3
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a list of BCT adapters;
        imports a CFF data-set
        """
        self.test_user = TestFactory.create_user("BCT_User")
        self.test_project = TestFactory.create_project(self.test_user,
                                                       "BCT-Project")
        ### Make sure Connectivity is in DB
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = dao.get_generic_entity(Connectivity, 'John Doe',
                                                   'subject')[0]

        # make weights matrix symmetric, or else some BCT algorithms will run infinitely:
        w = self.connectivity.weights
        self.connectivity.weights = w + w.T - numpy.diag(w.diagonal())

        algorithms = dao.get_generic_entity(model.Algorithm,
                                            'Brain Connectivity Toolbox',
                                            'group_description')
        self.assertTrue(algorithms is not None)
        self.assertTrue(len(algorithms) > 5)

        self.bct_adapters = []
        for algo in algorithms:
            self.bct_adapters.append(ABCAdapter.build_adapter(algo))
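The weights symmetrization used in setUp above mirrors the off-diagonal entries while leaving the diagonal untouched; a small self-contained check with hypothetical values:

import numpy

w = numpy.array([[1.0, 2.0],
                 [5.0, 3.0]])            # hypothetical asymmetric weights
sym = w + w.T - numpy.diag(w.diagonal())
# Off-diagonal entries become w[i, j] + w[j, i]; the diagonal stays w[i, i].
assert (sym == sym.T).all()
assert (sym.diagonal() == w.diagonal()).all()
assert sym[0, 1] == sym[1, 0] == 7.0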
    def test_workflow_generation(self):
        """
        A simple test just for the fact that a workflow is created and run, 
        no dynamic parameters are passed. In this case we create a two steps
        workflow: step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        actually ran by verifying that two operations are created and that
        one dataType is stored.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2",
                                                               "TestAdapter2", step_index=1,
                                                               static_kwargs={"test2": 2}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=2,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        self.assertTrue(len(stored_datatypes) == 2, "DataType from second step was not stored.")
        self.assertTrue(stored_datatypes[0].type == 'Datatype1', "Wrong type was stored.")
        self.assertTrue(stored_datatypes[1].type == 'Datatype1', "Wrong type was stored.")

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        self.assertEqual(finished, 3, "Didn't start operations for both adapters in workflow.")
        self.assertEqual(started, 0, "Some operations from workflow didn't finish.")
        self.assertEqual(error, 0, "Some operations finished with error status.")
    def test_launch(self):
        """
        Check that all required keys are present in output from BrainViewer launch.
        """
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__),  'eeg_unitvector_62.txt.bz2')
        
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, 'EEG Sensors')
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(self.connectivity, 'EEG', sensors)
        viewer = EegMonitor()
        result = viewer.launch(time_series)
        expected_keys = ['tsNames', 'groupedLabels', 'tsModes', 'tsStateVars', 'longestChannelLength',
                         'label_x', 'entities', 'page_size', 'number_of_visible_points',
                         'extended_view', 'initialSelection', 'ag_settings']

        for key in expected_keys:
            self.assertTrue(key in result, "key not found %s" % key)

        expected_ag_settings = ['channelsPerSet', 'channelLabels', 'noOfChannels', 'translationStep',
                                'normalizedSteps', 'nan_value_found', 'baseURLS', 'pageSize',
                                'nrOfPages', 'timeSetPaths', 'totalLength', 'number_of_visible_points',
                                'extended_view', 'measurePointsSelectionGIDs']

        ag_settings = json.loads(result['ag_settings'])

        for key in expected_ag_settings:
            self.assertTrue(key in ag_settings, "ag_settings should have the key %s" % key)
Example No. 6
 def test_get_available_bursts_happy(self):
     """
     Test that all the correct bursts are returned for the given project.
     """
     project = model.Project("second_test_proj", self.test_user.id,
                             "description")
     second_project = dao.store_entity(project)
     test_project_bursts = [
         TestFactory.store_burst(self.test_project.id).id for _ in xrange(4)
     ]
     second_project_bursts = [
         TestFactory.store_burst(second_project.id).id for _ in xrange(3)
     ]
     returned_test_project_bursts = [
         burst.id for burst in self.burst_service.get_available_bursts(
             self.test_project.id)
     ]
     returned_second_project_bursts = [
         burst.id for burst in self.burst_service.get_available_bursts(
             second_project.id)
     ]
     self.assertEqual(
         len(test_project_bursts), len(returned_test_project_bursts),
         "Incorrect bursts retrieved for project %s." % self.test_project)
     self.assertEqual(
         len(second_project_bursts), len(returned_second_project_bursts),
         "Incorrect bursts retrieved for project %s." % second_project)
     self.assertEqual(
         set(second_project_bursts), set(returned_second_project_bursts),
         "Incorrect bursts retrieved for project %s." % second_project)
     self.assertEqual(
         set(test_project_bursts), set(returned_test_project_bursts),
         "Incorrect bursts retrieved for project %s." % self.test_project)
Example No. 7
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a list of BCT adapters;
        imports a CFF data-set
        """
        self.test_user = TestFactory.create_user("BCT_User")
        self.test_project = TestFactory.create_project(self.test_user,
                                                       "BCT-Project")
        ### Make sure Connectivity is in DB
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        self.connectivity = dao.get_generic_entity(Connectivity, 'John Doe',
                                                   'subject')[0]

        # make weights matrix symmetric, or else some BCT algorithms will run infinitely:
        w = self.connectivity.weights
        self.connectivity.weights = w + w.T - numpy.diag(w.diagonal())

        self.algo_groups = dao.get_generic_entity(model.AlgorithmGroup,
                                                  'MatlabAdapter', 'classname')

        self.assertTrue(self.algo_groups is not None)
        self.assertEquals(6, len(self.algo_groups))
        self.bct_adapters = []
        for group in self.algo_groups:
            self.bct_adapters.append(
                TestFactory.create_adapter(group, self.test_project))
 def test_create_workflow(self):
     """
     Test that a workflow with all the associated workflow steps is actually created.
     """
     workflow_step_list = [
         TestFactory.create_workflow_step(
             "tvb.tests.framework.adapters.testadapter2",
             "TestAdapter2",
             step_index=1,
             static_kwargs={"test2": 2}),
         TestFactory.create_workflow_step(
             "tvb.tests.framework.adapters.testadapter1",
             "TestAdapter1",
             step_index=2,
             static_kwargs={
                 "test1_val1": 1,
                 "test1_val2": 1
             })
     ]
     burst_id = self.__create_complex_workflow(workflow_step_list)
     workflow_entities = dao.get_workflows_for_burst(burst_id)
     self.assertTrue(
         len(workflow_entities) == 1,
         "For some reason workflow was not stored in database.")
     workflow_steps = dao.get_workflow_steps(workflow_entities[0].id)
     self.assertEqual(len(workflow_steps),
                      len(workflow_step_list) + 1,
                      "Wrong number of workflow steps created.")
Example No. 9
    def test_import_bold(self):
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.mat_timeseries_importer',
            'MatTimeSeriesImporter')

        args = dict(
            data_file=self.bold_path,
            dataset_name='QL_20120824_DK_BOLD_timecourse',
            structure_path='',
            transpose=False,
            slice=None,
            sampling_rate=1000,
            start_time=0,
            tstype='region',
            tstype_parameters_option_region_connectivity=self.connectivity.gid,
            Data_Subject="QL")

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        tsr = TestFactory.get_entity(self.test_project, TimeSeriesRegion())

        self.assertEqual((661, 1, 68, 1), tsr.read_data_shape())
 def test_create_project_happy_flow(self):
     """
     Standard flow for creating a new project.
     """
     user1 = TestFactory.create_user('test_user1')
     user2 = TestFactory.create_user('test_user2')
     initial_projects = dao.get_projects_for_user(self.test_user.id)
     self.assertEqual(len(initial_projects), 0,
                      "Database reset probably failed!")
     TestFactory.create_project(self.test_user,
                                'test_project',
                                users=[user1.id, user2.id])
     resulting_projects = dao.get_projects_for_user(self.test_user.id)
     self.assertEqual(len(resulting_projects), 1,
                      "Project with valid data not inserted!")
     project = resulting_projects[0]
     if project.name == "test_project":
         self.assertEqual(project.description, "description",
                          "Description does not match")
         users_for_project = dao.get_members_of_project(project.id)
         for user in users_for_project:
             self.assertTrue(user.id in [user1.id, user2.id],
                             "Users not stored properly.")
     self.assertTrue(
         os.path.exists(
             os.path.join(TvbProfile.current.TVB_STORAGE,
                          FilesHelper.PROJECTS_FOLDER, "test_project")),
         "Folder for project was not created")
Example No. 11
    def init(self, with_data=True, user_role="test"):
        """
        Has a different name than setUp so it can be used safely inside transactions and
        will not be called before running the actual test.
        Using setUp inheritance here won't work! See TransactionalTest.
        """
        cherrypy.session = BaseControllersTest.CherrypySession()

        if with_data:
            # Add 3 entries so we no longer consider this the first run.
            TvbProfile.current.manager.add_entries_to_config_file(
                {'test': 'test', 'test1': 'test1', 'test2': 'test2'})
            self.test_user = TestFactory.create_user(username="******",
                                                     role=user_role)
            self.test_project = TestFactory.create_project(
                self.test_user, "Test")

            cherrypy.session[KEY_USER] = self.test_user
            cherrypy.session[KEY_PROJECT] = self.test_project
Example No. 12
 def setUp(self):
     """
     Set up the context needed by the tests.
     """
     self.files_helper = FilesHelper()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user, self.PROJECT_NAME)
Example No. 13
    def test_get_linkable_projects(self):
        """
        Test for retrieving the projects for a given user.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset!")
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(TestFactory.create_project(self.test_user if i < 3 else user1, 'test_proj' + str(i)))

        project_storage = self.structure_helper.get_project_folder(test_proj[0])

        operation = TestFactory.create_operation(test_user=self.test_user, test_project=test_proj[0])

        project_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(project_storage)
        datatype = dao.store_entity(model.DataType(module="test_data", subject="subj1", 
                                                   state="test_state", operation_id=operation.id))
        linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id, str(datatype.id))[0]
        self.assertEqual(len(linkable), 2, "Wrong count of link-able projects!")
        proj_names = [project.name for project in linkable]
        self.assertTrue(test_proj[1].name in proj_names)
        self.assertTrue(test_proj[2].name in proj_names)
        self.assertFalse(test_proj[3].name in proj_names)    
Example No. 14
    def test_happy_flow_import(self):
        """
        Test that importing a CFF generates at least one DataType in DB.
        """
        ConnectivityZipTest.import_test_connectivity96(self.test_user,
                                                       self.test_project,
                                                       subject=TEST_SUBJECT_A)

        field = FilterChain.datatype + '.subject'
        filters = FilterChain('', [field], [TEST_SUBJECT_A], ['=='])
        reference_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        dt_count_before = TestFactory.get_entity_count(self.test_project, Connectivity())

        self._import_csv_test_connectivity(reference_connectivity.gid, TEST_SUBJECT_B)

        dt_count_after = TestFactory.get_entity_count(self.test_project, Connectivity())
        self.assertEqual(dt_count_before + 1, dt_count_after)

        filters = FilterChain('', [field], [TEST_SUBJECT_B], ['like'])
        imported_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        # check relationship between the imported connectivity and the reference
        self.assertTrue((reference_connectivity.centres == imported_connectivity.centres).all())
        self.assertTrue((reference_connectivity.orientations == imported_connectivity.orientations).all())

        self.assertEqual(reference_connectivity.number_of_regions, imported_connectivity.number_of_regions)
        self.assertTrue((reference_connectivity.region_labels == imported_connectivity.region_labels).all())

        self.assertFalse((reference_connectivity.weights == imported_connectivity.weights).all())
        self.assertFalse((reference_connectivity.tract_lengths == imported_connectivity.tract_lengths).all())
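`_import_csv_test_connectivity`, used above, is not part of this snippet. A hedged sketch of such a helper follows, built on the same create-adapter / fire-operation pattern seen in the other importer examples; the importer module path, class name, file attributes and argument names are assumptions, not taken from the source.

    def _import_csv_test_connectivity(self, reference_connectivity_gid, subject):
        # Assumed importer module/class and argument names -- illustrative only.
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.csv_connectivity_importer',
                                              'CSVConnectivityImporter')
        args = {'weights': self.WEIGHTS_FILE,              # hypothetical CSV file paths
                'tracts': self.TRACT_LENGTHS_FILE,
                'input_data': reference_connectivity_gid,  # connectivity the CSV values are attached to
                DataTypeMetaData.KEY_SUBJECT: subject}
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)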
    def test_get_linkable_projects(self):
        """
        Test for retrieving the projects for a given user.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset!")
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(
                TestFactory.create_project(self.test_user if i < 3 else user1,
                                           'test_proj' + str(i)))

        project_storage = self.structure_helper.get_project_folder(
            test_proj[0])

        operation = TestFactory.create_operation(test_user=self.test_user,
                                                 test_project=test_proj[0])

        project_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(project_storage)
        datatype = dao.store_entity(
            model.DataType(module="test_data",
                           subject="subj1",
                           state="test_state",
                           operation_id=operation.id))
        linkable = self.project_service.get_linkable_projects_for_user(
            self.test_user.id, str(datatype.id))[0]
        self.assertEqual(len(linkable), 2,
                         "Wrong count of link-able projects!")
        proj_names = [project.name for project in linkable]
        self.assertTrue(test_proj[1].name in proj_names)
        self.assertTrue(test_proj[2].name in proj_names)
        self.assertFalse(test_proj[3].name in proj_names)
    def test_happy_flow_surface_import(self):
        """
        Verifies the happy flow for importing a surface.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project, ProjectionSurfaceEEG())
        group = dao.find_group(
            "tvb.adapters.uploaders.projection_matrix_importer", "ProjectionMatrixSurfaceEEGImporter"
        )
        importer = ABCAdapter.build_adapter(group)

        file_path = os.path.join(
            os.path.abspath(os.path.dirname(dataset.__file__)), "projection_eeg_65_surface_16k.npy"
        )
        args = {
            "projection_file": file_path,
            "dataset_name": "ProjectionMatrix",
            "sensors": self.sensors.gid,
            "surface": self.surface.gid,
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT,
        }

        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
        dt_count_after = TestFactory.get_entity_count(self.test_project, ProjectionSurfaceEEG())

        self.assertEqual(dt_count_before + 1, dt_count_after)
Example No. 17
 def setUp(self):
     """
     Reset the database before each test.
     """
     self.test_user = TestFactory.create_user('CFF_User')
     self.test_project = TestFactory.create_project(self.test_user,
                                                    "CFF_Project")
Example No. 18
 def test_editresultfigures_remove_session(self):
     """
     Tests that the result dictionary has the expected keys/values and that a call to `editresultfigures`
     correctly redirects to '/project/figure/displayresultfigures' on session removal.
     """
     cherrypy.request.method = 'POST'
     TestFactory.create_figure(self.operation.id,
                               self.test_user.id,
                               self.test_project.id,
                               name="figure1",
                               path="path-to-figure1",
                               session_name="test")
     TestFactory.create_figure(self.operation.id,
                               self.test_user.id,
                               self.test_project.id,
                               name="figure2",
                               path="path-to-figure2",
                               session_name="test")
     figs, _ = dao.get_previews(self.test_project.id, self.test_user.id,
                                "test")
     self.assertEqual(len(figs['test']), 2)
     data = {'old_session_name': 'test', 'new_session_name': 'test_renamed'}
     self._expect_redirect('/project/figure/displayresultfigures',
                           self.figure_c.editresultfigures,
                           remove_session=True,
                           **data)
     figs, previews = dao.get_previews(self.test_project.id,
                                       self.test_user.id, "test")
     self.assertEqual(len(figs['test']), 0)
     self.assertEqual(previews, {})
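`_expect_redirect` comes from the controller-test base class and is not shown here; a minimal sketch of what it presumably does, assuming the controller method raises cherrypy.HTTPRedirect:

 def _expect_redirect(self, page, method, *args, **kwargs):
     # Illustrative only: calling the controller method should raise an HTTPRedirect towards `page`.
     try:
         method(*args, **kwargs)
         self.fail("Expected a redirect to %s." % page)
     except cherrypy.HTTPRedirect as redirect:
         self.assertTrue(redirect.urls[0].endswith(page),
                         "Should redirect to %s, not %s." % (page, redirect.urls[0]))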
 def test_usermanagement_post_valid(self):
     """
     Create a valid post and check that user is created.
     """
     self.test_user.role = "ADMINISTRATOR"
     self.test_user = dao.store_entity(self.test_user)
     cherrypy.session[common.KEY_USER] = self.test_user
     TestFactory.create_user(username="******")
     TestFactory.create_user(username="******", validated=False)
     user_before_delete = dao.get_user_by_name("to_be_deleted")
     self.assertTrue(user_before_delete is not None)
     user_before_validation = dao.get_user_by_name("to_validate")
     self.assertFalse(user_before_validation.validated)
     data = {
         "delete_%i" % user_before_delete.id: True,
         "role_%i" % user_before_validation.id: "ADMINISTRATOR",
         "validate_%i" % user_before_validation.id: True
     }
     self.user_c.usermanagement(do_persist=True, **data)
     user_after_delete = dao.get_user_by_id(user_before_delete.id)
     self.assertTrue(user_after_delete is None, "User should be deleted.")
     user_after_validation = dao.get_user_by_id(user_before_validation.id)
     self.assertTrue(user_after_validation.validated,
                     "User should be validated now.")
     self.assertTrue(user_after_validation.role == "ADMINISTRATOR",
                     "Role has not changed.")
Example No. 20
    def setUp(self):
        """ Prepare some entities to work with during tests:"""

        self.flow_service = FlowService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(admin=self.test_user)
        ### Insert some starting data in the database.
        categ1 = model.AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        categ2 = model.AlgorithmCategory('two', rawinput=True)
        self.categ2 = dao.store_entity(categ2)

        group1 = model.AlgorithmGroup("test_module1", "classname1", categ1.id)
        self.algo_group1 = dao.store_entity(group1)
        group2 = model.AlgorithmGroup("test_module2", "classname2", categ2.id)
        self.algo_group2 = dao.store_entity(group2)
        group3 = model.AlgorithmGroup("test_module3", "classname3", categ1.id)
        self.algo_group3 = dao.store_entity(group3)

        group_v = model.AlgorithmGroup(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS, categ2.id)
        self.algo_group_v = dao.store_entity(group_v)

        algo_v = model.Algorithm(self.algo_group_v.id, 'ident', name='', req_data='', param_name='', output='')
        self.algorithm_v = dao.store_entity(algo_v)

        algo1 = model.Algorithm(self.algo_group1.id, 'id', name='', req_data='', param_name='', output='')
        self.algorithm1 = dao.store_entity(algo1)
Example No. 21
    def test_bad_reference(self):
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        field = FilterChain.datatype + '.subject'
        filters = FilterChain('', [field], [TEST_SUBJECT_A], ['!='])
        bad_reference_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        self.assertRaises(OperationException, self._import_csv_test_connectivity,
                          bad_reference_connectivity.gid, TEST_SUBJECT_A)
Example No. 22
 def setUp(self):
     """
     Set up the context needed by the tests.
     """
     self.files_helper = FilesHelper()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user,
                                                    self.PROJECT_NAME)
Example No. 23
 def test_remove_project_wrong_id(self):
     """
     Flow for deleting a project given a non-existing id.
     """
     TestFactory.create_project(self.test_user, 'test_proj')
     projects = dao.get_projects_for_user(self.test_user.id)
     self.assertEqual(len(projects), 1, "Initializations failed!") 
     self.assertRaises(ProjectServiceException, self.project_service.remove_project, 99)   
 def setUp(self):
     """
     Sets up the environment for running the tests;
     creates a test user and a test project, saves old configuration and imports a CFF data-set
     """
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
 def _store_users_happy_flow(self, n_users, prefix=""):
     """
     Store users in happy flow. In this case the transaction should just be committed properly and the changes
     should be visible in database.
     
     :param n_users: number of users to be stored by this method
     """
     for idx in range(n_users):
         TestFactory.create_user(prefix + 'test_user' + str(idx), 'pass', '*****@*****.**', True, 'test')
Example No. 26
 def _store_users_happy_flow(self, n_users, prefix=""):
     """
     Store users in happy flow. In this case the transaction should just be committed properly and the changes
     should be visible in database.
     
     :param n_users: number of users to be stored by this method
     """
     for idx in range(n_users):
         TestFactory.create_user(prefix + 'test_user' + str(idx), 'pass', '*****@*****.**', True, 'test')
Example No. 27
    def test_get_filtered_by_column(self):
        """
        Test the filter function when retrieving dataTypes with a filter
        on a column from a class-specific table (e.g. DATA_arraywrapper).
        """
        operation_1 = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)
        operation_2 = TestFactory.create_operation(
            test_user=self.test_user, test_project=self.test_project)

        one_dim_array = numpy.arange(5)
        two_dim_array = numpy.array([[1, 2], [2, 3], [1, 4]])
        self._store_float_array(one_dim_array, "John Doe 1", operation_1.id)
        self._store_float_array(one_dim_array, "John Doe 2", operation_1.id)
        self._store_float_array(two_dim_array, "John Doe 3", operation_2.id)

        count = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(count, 3, "Problems with inserting data")
        first_filter = FilterChain(
            fields=[FilterChain.datatype + '._nr_dimensions'],
            operations=["=="],
            values=[1])
        count = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            first_filter)[1]
        self.assertEqual(count, 2, "Data was not filtered")

        second_filter = FilterChain(
            fields=[FilterChain.datatype + '._nr_dimensions'],
            operations=["=="],
            values=[2])
        filtered_data = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            second_filter)[0]
        self.assertEqual(len(filtered_data), 1, "Data was not filtered")
        self.assertEqual(filtered_data[0][3], "John Doe 3")

        third_filter = FilterChain(
            fields=[FilterChain.datatype + '._length_1d'],
            operations=["=="],
            values=[3])
        filtered_data = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray",
            third_filter)[0]
        self.assertEqual(len(filtered_data), 1,
                         "Data was not filtered correct")
        self.assertEqual(filtered_data[0][3], "John Doe 3")
        try:
            if os.path.exists('One_dim.txt'):
                os.remove('One_dim.txt')
            if os.path.exists('Two_dim.txt'):
                os.remove('Two_dim.txt')
            if os.path.exists('One_dim-1.txt'):
                os.remove('One_dim-1.txt')
        except Exception:
            pass
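The two filtered columns above map onto plain numpy properties of the stored arrays; a quick standalone check of which entries each filter should match, assuming _nr_dimensions stores ndim and _length_1d the size of the first dimension:

import numpy

one_dim_array = numpy.arange(5)                        # "John Doe 1" / "John Doe 2"
two_dim_array = numpy.array([[1, 2], [2, 3], [1, 4]])  # "John Doe 3"

assert one_dim_array.ndim == 1 and two_dim_array.ndim == 2           # _nr_dimensions == 1 matches two entries
assert two_dim_array.shape[0] == 3 and one_dim_array.shape[0] == 5   # _length_1d == 3 matches only "John Doe 3"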
Example No. 28
 def setUp(self):
     """
     Set up any additionally needed parameters.
     """
     self.clean_database()
     super(GenshiTestNDimensionArray, self).setUp()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
Example No. 29
 def test_retrieve_projects_page2(self):
     """
     Test for retrieving the second page projects for a given user.
     """
     for i in range(PROJECTS_PAGE_SIZE + 3):
         TestFactory.create_project(self.test_user, 'test_proj' + str(i))
     projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
     self.assertEqual(len(projects), (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE, "Pagination improper.")
     self.assertEqual(pages, 2, 'Wrong number of pages retrieved.')
Example No. 30
 def setUp(self):
     """
     Reset the database before each test.
     """
     self.clean_database()
     self.flow_service = FlowService()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(admin=self.test_user)
     self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
 def test_remove_project_wrong_id(self):
     """
     Flow for deleting a project given a non-existing id.
     """
     TestFactory.create_project(self.test_user, 'test_proj')
     projects = dao.get_projects_for_user(self.test_user.id)
     self.assertEqual(len(projects), 1, "Initializations failed!")
     self.assertRaises(ProjectServiceException,
                       self.project_service.remove_project, 99)
 def setUp(self):
     """
     Sets up the environment for running the tests;
     creates a test user and a test project, saves old configuration and imports a CFF data-set
     """
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     TestFactory.import_cff(test_user=self.test_user,
                            test_project=self.test_project)
Example No. 33
    def _import_connectivity(self):
        importer = TestFactory.create_adapter('tvb.adapters.uploaders.zip_connectivity_importer',
                                              'ZIPConnectivityImporter')

        ### Launch Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id,
                                     uploaded=self.connectivity_path, Data_Subject='QL')

        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
    def test_happy_flow_import(self):
        """
        Test that importing a CFF generates at least one DataType in DB.
        """
        dt_count_before = TestFactory.get_entity_count(self.test_project, Connectivity())
        
        ConnectivityZipTest.import_test_connectivity96(self.test_user, self.test_project)

        dt_count_after = TestFactory.get_entity_count(self.test_project, Connectivity())
        self.assertEqual(dt_count_before + 1, dt_count_after)
Example No. 35
    def setUp(self):
        """ Prepare some entities to work with during tests:"""

        self.flow_service = FlowService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(admin=self.test_user)

        category = dao.get_uploader_categories()[0]
        self.algorithm = dao.store_entity(model.Algorithm(TEST_ADAPTER_VALID_MODULE,
                                                          TEST_ADAPTER_VALID_CLASS, category.id))
Example No. 36
 def _store_users_raises_exception(self, n_users):
     """
     Store users but raise an exception at the end. If the exception is not handled before reaching the
     transactional decorator, all changes should be rolled back.
     
     :param n_users: number of users to be stored by this method
     """
     for idx in range(n_users):
         TestFactory.create_user('test_user' + str(idx), 'pass', '*****@*****.**', True, 'test')
     raise Exception("This is just so transactional kicks in and a rollback should be done.")
 def setUp(self):
     """
     Reset the database before each test.
     """
     self.clean_database()
     initialize_storage()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.operation_service = OperationService()
     self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE
Example No. 38
 def setUp(self):
     """
     Reset the database before each test.
     """
     self.clean_database()
     initialize_storage()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(self.test_user)
     self.operation_service = OperationService()
     self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE
Example No. 39
 def setUp(self):
     """
     Reset the database before each test.
     """
     self.clean_database()
     self.flow_service = FlowService()
     self.test_user = TestFactory.create_user()
     self.test_project = TestFactory.create_project(admin=self.test_user)
     self.operation = TestFactory.create_operation(
         test_user=self.test_user, test_project=self.test_project)
 def setUp(self):
     """
     Sets up the environment for running the tests;
     creates a test user, a test project, a connectivity and a surface;
     imports a CFF data-set
     """
     self.test_user = TestFactory.create_user("UserRM")
     self.test_project = TestFactory.import_default_project(self.test_user)
     self.connectivity = self._get_entity(Connectivity())
     self.surface = self._get_entity(CorticalSurface())
 def setUp(self):
     """
     Sets up the environment for running the tests;
     creates a test user, a test project, a connectivity and a surface;
     imports a CFF data-set
     """
     self.test_user = TestFactory.create_user("UserRM")
     self.test_project = TestFactory.import_default_project(self.test_user)
     self.connectivity = self._get_entity(Connectivity)
     self.surface = self._get_entity(CorticalSurface)
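`_get_entity`, used by the two setUp variants above, is not included in the snippet; it most likely just wraps the TestFactory.get_entity calls seen elsewhere in these examples. A sketch with an assumed signature:

 def _get_entity(self, expected_data, filters=None):
     # Illustrative only: fetch the stored instance of this DataType from the test project.
     return TestFactory.get_entity(self.test_project, expected_data, filters)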
Example No. 42
 def test_happy_flow(self):
     self.assertEqual(
         0,
         TestFactory.get_entity_count(self.test_project,
                                      ConnectivityMeasure()))
     self._import('mantini_networks.mat')
     self.assertEqual(
         6,
         TestFactory.get_entity_count(self.test_project,
                                      ConnectivityMeasure()))
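`_import`, called in test_happy_flow above, is defined elsewhere in that test class; here is a hedged sketch following the same importer pattern as the other examples. The importer module path, class name, data package and argument names are assumptions:

 def _import(self, import_file_name):
     # Assumed ConnectivityMeasure importer -- illustrative only.
     importer = TestFactory.create_adapter('tvb.adapters.uploaders.connectivity_measure_importer',
                                           'ConnectivityMeasureImporter')
     path = os.path.join(os.path.dirname(test_data.__file__), import_file_name)  # hypothetical data package
     args = {'data_file': path,
             'dataset_name': 'M',                    # hypothetical dataset name inside the .mat file
             'connectivity': self.connectivity.gid,
             DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}
     FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)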
 def _store_users_raises_exception(self, n_users):
     """
     Store users but raise an exception at the end. If the exception is not handled before reaching the
     transactional decorator, all changes should be rolled back.
     
     :param n_users: number of users to be stored by this method
     """
     for idx in range(n_users):
         TestFactory.create_user('test_user' + str(idx), 'pass', '*****@*****.**', True, 'test')
     raise Exception("This is just so transactional kicks in and a rollback should be done.")
Example No. 44
 def setUp(self):
     zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                             'connectivity', 'connectivity_66.zip')
     self.test_user = TestFactory.create_user('Test_User')
     self.test_project = TestFactory.create_project(self.test_user,
                                                    "Test_Project")
     TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                         "John", zip_path)
     self.connectivity = TestFactory.get_entity(self.test_project,
                                                Connectivity())
    def setUp(self):
        """
        Reset the database before each test.
        """
        self.test_user = TestFactory.create_user("UserPM")
        self.test_project = TestFactory.create_project(self.test_user)

        zip_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__),
                                'eeg_brainstorm_65.txt')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path,
                                   Sensors_Importer.EEG_SENSORS)

        zip_path = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__),
                                'cortex_16384.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project,
                                       zip_path, CORTICAL, True)

        self.surface = TestFactory.get_entity(self.test_project,
                                              CorticalSurface())
        self.assertTrue(self.surface is not None)
        self.sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        self.assertTrue(self.sensors is not None)

        self.importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.projection_matrix_importer',
            'ProjectionMatrixSurfaceEEGImporter')
 def test_retrieve_3projects_3usr(self):
     """
     Three users, 3 projects. Structure of db:
     proj1: {admin: user1, members: [user2, user3]}
     proj2: {admin: user2, members: [user1]}
     proj3: {admin: user3, members: [user1, user2]}
     Check valid project returns for all the users.
     """
     member1 = TestFactory.create_user("member1")
     member2 = TestFactory.create_user("member2")
     member3 = TestFactory.create_user("member3")
     TestFactory.create_project(member1,
                                'TestProject1',
                                users=[member2.id, member3.id])
     TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
     TestFactory.create_project(member3,
                                'TestProject3',
                                users=[member1.id, member2.id])
     projects = self.project_service.retrieve_projects_for_user(
         member1.id, 1)[0]
     self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
     projects = self.project_service.retrieve_projects_for_user(
         member2.id, 1)[0]
     self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
     projects = self.project_service.retrieve_projects_for_user(
         member3.id, 1)[0]
     self.assertEqual(len(projects), 2, "Projects not retrieved properly!")
Example No. 47
    def setUp(self):
        """ Prepare some entities to work with during tests:"""

        self.flow_service = FlowService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(admin=self.test_user)

        category = dao.get_uploader_categories()[0]
        self.algorithm = dao.store_entity(
            model.Algorithm(TEST_ADAPTER_VALID_MODULE,
                            TEST_ADAPTER_VALID_CLASS, category.id))
Example No. 48
    def setUp(self):
        """
        Sets up the environment for running the tests;
        cleans the database before testing and saves config file;
        creates a test user, a test project;
        creates burst, flow, operation and workflow services

        """
        self.clean_database()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
Example No. 49
    def test_bad_reference(self):
        TestFactory.import_cff(test_user=self.test_user,
                               test_project=self.test_project)
        field = FilterChain.datatype + '.subject'
        filters = FilterChain('', [field], [TEST_SUBJECT_A], ['!='])
        bad_reference_connectivity = TestFactory.get_entity(
            self.test_project, Connectivity(), filters)

        self.assertRaises(OperationException,
                          self._import_csv_test_connectivity,
                          bad_reference_connectivity.gid, TEST_SUBJECT_A)
    def setUp(self):
        """
        Sets up the environment for running the tests;
        cleans the database before testing and saves config file;
        creates a test user, a test project;
        creates burst, flow, operation and workflow services

        """
        self.clean_database()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
    def setUp(self):
        _, self.connectivity = DatatypesFactory().create_connectivity()
        self.test_user = TestFactory.create_user(username="******")
        self.test_project = TestFactory.create_project(self.test_user, "Test")

        burst_conf = BurstConfiguration(self.test_project.id)
        burst_conf._simulator_configuration = self.CONF_HOPFIELD_HEUN_STOCH_RANGES
        burst_conf.prepare_after_load()
        burst_conf.simulator_configuration['connectivity'] = {'value': self.connectivity.gid}

        self.s_manager = SerializationManager(burst_conf)
        self.empty_manager = SerializationManager(BurstConfiguration(None))
 def test_getmemberspage(self):
     """
     Get the first page of the members page.
     """
     users_count = dao.get_all_users(is_count=True)
     user = TestFactory.create_user('usr', 'pass')
     test_project = TestFactory.create_project(user, 'new_name')
     result = self.project_c.getmemberspage(0, test_project.id)
     self.assertEqual(result['usersMembers'], [])
     # The same users as before should be available, since the new one we
     # created is the owner of the project.
     self.assertEqual(len(result['usersList']), users_count)
 def setUp(self):
     """
     Sets up the environment for running the tests;
     creates a test user, a test project, a connectivity and a surface;
     imports a CFF data-set
     """
     self.datatypeFactory = DatatypesFactory()
     self.test_project = self.datatypeFactory.get_project()
     self.test_user = self.datatypeFactory.get_user()
     
     TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
     self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
     self.assertTrue(self.connectivity is not None)
 def test_viewall_valid_data(self):
     """
     Create a bunch of projects and check that they are returned correctly.
     """
     project1 = TestFactory.create_project(self.test_user, 'prj1')
     TestFactory.create_project(self.test_user, 'prj2')
     TestFactory.create_project(self.test_user, 'prj3')
     result = self.project_c.viewall(selected_project_id=project1.id)
     projects_list = result['projectsList']
     ## Use this old version of the set builder, otherwise it will fail on Python 2.6
     self.assertEqual(set([prj.name for prj in projects_list]), {'prj1', 'prj2', 'prj3', 'Test'})
     self.assertEqual(result['page_number'], 1)
     self.assertEqual(result[common.KEY_PROJECT].name, 'prj1')
    def _create_datatype_group(self):
        """
        Creates a project and one DataTypeGroup with 2 DataTypes in the new group.
        """
        test_project = TestFactory.create_project(self.test_user, "NewProject")

        all_operations = dao.get_filtered_operations(test_project.id, None, is_count=True)
        self.assertEqual(0, all_operations, "There should be no operation.")
        
        datatypes, op_group_id = TestFactory.create_group(self.test_user, test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)

        return test_project, dt_group.id, datatypes[0], datatypes[1]
Example No. 56
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        ## Import Shelf Face Object
        zip_path = os.path.join(os.path.dirname(surfaces_dataset.__file__), 'face_surface_old.zip')
        TestFactory.import_surface_zip(self.test_user, self.test_project, zip_path, FACE, True)
Example No. 57
    def test_launch_internal(self):
        """
        Check that all required keys are present in output from InternalSensorViewer launch.
        """
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__), 'internal_39.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, Sensors_Importer.INTERNAL_SENSORS)
        sensors = TestFactory.get_entity(self.test_project, SensorsInternal())

        viewer = InternalSensorViewer()
        viewer.current_project_id = self.test_project.id

        result = viewer.launch(sensors)
        self.assert_compliant_dictionary(self.EXPECTED_KEYS_INTERNAL, result)
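`assert_compliant_dictionary` is inherited from the viewer-test base class and is not shown; a minimal sketch of the check it presumably performs (every expected key present with a non-None value):

    def assert_compliant_dictionary(self, expected_keys, result):
        # Illustrative only: each expected key must exist in the launch result and hold a value.
        for key in expected_keys:
            self.assertTrue(key in result, "Key %s is missing from the result." % key)
            self.assertTrue(result[key] is not None, "Value for key %s should not be None." % key)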
    def setUp(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(single_filter=StaticFiltersFactory.FULL_VIEW)