def test_launch(self):
        """
        Check that all required keys are present in the output of the
        EegMonitor viewer launch, including the nested 'ag_settings' JSON.
        """
        # Import EEG sensors and build a time-series bound to them.
        zip_path = os.path.join(os.path.dirname(sensors_dataset.__file__), 'eeg_unitvector_62.txt.bz2')

        TestFactory.import_sensors(self.test_user, self.test_project, zip_path, 'EEG Sensors')
        sensors = TestFactory.get_entity(self.test_project, SensorsEEG())
        time_series = self.datatypeFactory.create_timeseries(self.connectivity, 'EEG', sensors)
        viewer = EegMonitor()
        result = viewer.launch(time_series)
        # Fix: 'ag_settings' was listed twice in the original expected-keys list.
        expected_keys = ['tsNames', 'groupedLabels', 'tsModes', 'tsStateVars', 'longestChannelLength',
                         'label_x', 'entities', 'page_size', 'number_of_visible_points',
                         'extended_view', 'initialSelection', 'ag_settings']

        for key in expected_keys:
            assert key in result, "key not found %s" % key

        expected_ag_settings = ['channelsPerSet', 'channelLabels', 'noOfChannels', 'translationStep',
                                'normalizedSteps', 'nan_value_found', 'baseURLS', 'pageSize',
                                'nrOfPages', 'timeSetPaths', 'totalLength', 'number_of_visible_points',
                                'extended_view', 'measurePointsSelectionGIDs']

        # 'ag_settings' is serialized JSON; parse it before checking its keys.
        ag_settings = json.loads(result['ag_settings'])

        for key in expected_ag_settings:
            assert key in ag_settings, "ag_settings should have the key %s" % key
    def test_happy_flow_import(self):
        """
        Importing a CSV connectivity derived from an existing reference must add
        exactly one new Connectivity DataType, which shares centres, orientations
        and region labels with the reference but has its own weights and tract
        lengths.
        """
        TestConnectivityZip.import_test_connectivity96(self.test_user,
                                                       self.test_project,
                                                       subject=TEST_SUBJECT_A)

        # Locate the just-imported reference connectivity by its subject.
        field = FilterChain.datatype + '.subject'
        filters = FilterChain('', [field], [TEST_SUBJECT_A], ['=='])
        reference_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        dt_count_before = TestFactory.get_entity_count(self.test_project, Connectivity())

        self._import_csv_test_connectivity(reference_connectivity.gid, TEST_SUBJECT_B)

        # Exactly one new Connectivity datatype should have been created.
        dt_count_after = TestFactory.get_entity_count(self.test_project, Connectivity())
        assert dt_count_before + 1 == dt_count_after

        filters = FilterChain('', [field], [TEST_SUBJECT_B], ['like'])
        imported_connectivity = TestFactory.get_entity(self.test_project, Connectivity(), filters)

        # check relationship between the imported connectivity and the reference:
        # geometry and labelling are inherited from the reference ...
        assert (reference_connectivity.centres == imported_connectivity.centres).all()
        assert (reference_connectivity.orientations == imported_connectivity.orientations).all()

        assert reference_connectivity.number_of_regions == imported_connectivity.number_of_regions
        assert (reference_connectivity.region_labels == imported_connectivity.region_labels).all()

        # ... while the CSV import supplies its own weights and tract lengths.
        assert not (reference_connectivity.weights == imported_connectivity.weights).all()
        assert not (reference_connectivity.tract_lengths == imported_connectivity.tract_lengths).all()
    def test_workflow_generation(self):
        """
        A simple test just for the fact that a workflow is created and ran,
        no dynamic parameters are passed. In this case we create a two steps
        workflow: step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        are actually ran by checking the operation counters and that the
        expected dataTypes are stored.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2",
                                                               "TestAdapter2", step_index=1,
                                                               static_kwargs={"test2": 2}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=2,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 2, "DataType from second step was not stored."
        assert stored_datatypes[0].type == 'Datatype1', "Wrong type was stored."
        assert stored_datatypes[1].type == 'Datatype1', "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        # 3 finished operations are expected — presumably the workflow bootstrap
        # plus the two steps; confirm against __create_complex_workflow.
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."
    def test_workflow_dynamic_params(self):
        """
        A simple test just for the fact that dynamic parameters are passed properly
        between two workflow steps:
                  step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
                  step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance.
        The second adapter has this passed as a dynamic workflow parameter.
        We check that the steps are actually ran by checking the operation
        counters and that three dataTypes are stored.
        """
        # Step 2 receives the datatype produced at index 0 of step 1 as its
        # dynamic "test" parameter (wired via the workflow-config index keys).
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=1,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter3",
                                                               "TestAdapter3", step_index=2,
                                                               dynamic_kwargs={
                                                                   "test": {wf_cfg.DATATYPE_INDEX_KEY: 0,
                                                                            wf_cfg.STEP_INDEX_KEY: 1}})]

        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert  len(stored_datatypes) == 3, "DataType from all step were not stored."
        for result_row in stored_datatypes:
            assert  result_row.type in ['Datatype1', 'Datatype2'], "Wrong type was stored."

        # All scheduled operations must have finished without errors.
        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert  finished == 3, "Didn't start operations for both adapters in workflow."
        assert  started == 0, "Some operations from workflow didn't finish."
        assert  error == 0, "Some operations finished with error status."
    def test_get_linkable_projects(self):
        """
        A datatype can be linked into the user's other projects: not the
        project that already owns it, and not a project owned by someone else.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        assert len(initial_projects) == 0, "Database was not reset!"
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        # Three projects for the test user, the fourth owned by a different user.
        for i in range(4):
            test_proj.append(TestFactory.create_project(self.test_user if i < 3 else user1, 'test_proj' + str(i)))

        project_storage = self.structure_helper.get_project_folder(test_proj[0])

        operation = TestFactory.create_operation(test_user=self.test_user, test_project=test_proj[0])

        # Fix: avoid re-using/overwriting 'project_storage' for the operation folder.
        operation_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(operation_storage)
        datatype = dao.store_entity(model.DataType(module="test_data", subject="subj1",
                                                   state="test_state", operation_id=operation.id))
        linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id, str(datatype.id))[0]
        assert len(linkable) == 2, "Wrong count of link-able projects!"
        proj_names = [project.name for project in linkable]
        # Link-able: the user's other two projects; excluded: the foreign user's.
        assert test_proj[1].name in proj_names
        assert test_proj[2].name in proj_names
        assert test_proj[3].name not in proj_names
 def transactional_setup_method(self):
     """
     Set up the test environment: a fresh user, a project owned by that
     user, and an imported CFF data-set.
     """
     user = TestFactory.create_user()
     project = TestFactory.create_project(user)
     self.test_user = user
     self.test_project = project
     TestFactory.import_cff(test_user=user, test_project=project)
    def test_bad_reference(self):
        """
        Importing a CSV connectivity against a connectivity of the wrong
        subject must raise an OperationException.
        """
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        subject_field = FilterChain.datatype + '.subject'
        wrong_subject_filter = FilterChain('', [subject_field], [TEST_SUBJECT_A], ['!='])
        bad_reference = TestFactory.get_entity(self.test_project, Connectivity(), wrong_subject_filter)

        with pytest.raises(OperationException):
            self._import_csv_test_connectivity(bad_reference.gid, TEST_SUBJECT_A)
# Beispiel #8 (scraped example separator)
 def setup_method(self):
     """
     Start every test from a clean database, with a flow service, one
     user, one project owned by that user and one operation in it.
     """
     self.clean_database()
     self.flow_service = FlowService()
     user = TestFactory.create_user()
     project = TestFactory.create_project(admin=user)
     self.test_user = user
     self.test_project = project
     self.operation = TestFactory.create_operation(test_user=user, test_project=project)
 def _store_users_happy_flow(self, n_users, prefix=""):
     """
     Persist ``n_users`` users without raising; the surrounding transaction
     should commit normally and the rows become visible in the database.

     :param n_users: number of users to be stored by this method
     :param prefix: optional prefix for the generated user names
     """
     for number in range(n_users):
         username = prefix + 'test_user' + str(number)
         TestFactory.create_user(username, 'pass', '*****@*****.**', True, 'test')
 def test_remove_project_wrong_id(self):
     """
     Deleting a project by a non-existent id must raise ProjectServiceException.
     """
     TestFactory.create_project(self.test_user, 'test_proj')
     stored_projects = dao.get_projects_for_user(self.test_user.id)
     assert len(stored_projects) == 1, "Initializations failed!"
     # 99 does not correspond to any stored project.
     with pytest.raises(ProjectServiceException):
         self.project_service.remove_project(99)
 def setup_method(self):
     """
     Clean the database, run the parent class set-up, then create the
     user, project and operation fixtures used by the tests.
     """
     self.clean_database()
     super(TestGenshiNDimensionArray, self).setup_method()
     user = TestFactory.create_user()
     project = TestFactory.create_project(user)
     self.test_user = user
     self.test_project = project
     self.operation = TestFactory.create_operation(test_user=user, test_project=project)
 def test_retrieve_projects_page2(self):
     """
     Test for retrieving the second page of projects for a given user.
     """
     # One full page plus 3 extra projects, so page 2 holds the remainder.
     for i in range(PROJECTS_PAGE_SIZE + 3):
         TestFactory.create_project(self.test_user, 'test_proj' + str(i))
     projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
     # Fix: corrected "inproper" typo in the assertion message.
     assert len(projects) == (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE, "Pagination improper."
     assert pages == 2, 'Wrong number of pages retrieved.'
 def setup_method(self):
     """
     Reset the database and storage before each test and create the
     user, project and operation-service fixtures.
     """
     self.clean_database()
     initialize_storage()
     user = TestFactory.create_user()
     self.test_user = user
     self.test_project = TestFactory.create_project(user)
     self.operation_service = OperationService()
     # Remember the configured disk quota so tests can restore it afterwards.
     self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE
 def _store_users_raises_exception(self, n_users):
     """
     Persist ``n_users`` users, then raise deliberately so that the
     transactional decorator (if nothing catches the exception first)
     rolls every change back.

     :param n_users: number of users to be stored by this method
     """
     for number in range(n_users):
         TestFactory.create_user('test_user' + str(number), 'pass', '*****@*****.**', True, 'test')
     raise Exception("This is just so transactional kicks in and a rollback should be done.")
 def transactional_setup_method(self):
     """
     Build the fixtures used by the tests: a user, the default project
     imported for that user, plus a connectivity and a cortical surface
     fetched from the imported data.
     """
     user = TestFactory.create_user("UserRM")
     self.test_user = user
     self.test_project = TestFactory.import_default_project(user)
     self.connectivity = self._get_entity(Connectivity)
     self.surface = self._get_entity(CorticalSurface)
    def setup_method(self):
        """
        Clean the database before testing, then create the test user and
        the test project shared by every test in this class.
        """
        self.clean_database()
        user = TestFactory.create_user()
        self.test_user = user
        self.test_project = TestFactory.create_project(user)
    def transactional_setup_method(self):
        """
        Prepare a SerializationManager around a burst loaded from the
        CONF_HOPFIELD_HEUN_STOCH_RANGES canned configuration, plus an empty
        manager for edge-case tests.
        """
        _, self.connectivity = DatatypesFactory().create_connectivity()
        self.test_user = TestFactory.create_user(username="******")
        self.test_project = TestFactory.create_project(self.test_user, "Test")

        burst_conf = BurstConfiguration(self.test_project.id)
        # Load the canned simulator configuration, let the burst rebuild its
        # derived state, then point it at the freshly created connectivity.
        burst_conf._simulator_configuration = self.CONF_HOPFIELD_HEUN_STOCH_RANGES
        burst_conf.prepare_after_load()
        burst_conf.simulator_configuration['connectivity'] = {'value': self.connectivity.gid}

        self.s_manager = SerializationManager(burst_conf)
        self.empty_manager = SerializationManager(BurstConfiguration(None))
 def test_getmemberspage(self):
     """
     The first members page of a newly created project has no members,
     and creating the owner does not change the candidate-users list.
     """
     initial_user_count = dao.get_all_users(is_count=True)
     owner = TestFactory.create_user('usr', 'pass')
     project = TestFactory.create_project(owner, 'new_name')
     result = self.project_c.getmemberspage(0, project.id)
     assert result['usersMembers'] == []
     # Same users as before should be available, since the new user is
     # the project owner.
     assert len(result['usersList']) == initial_user_count
 def _store_users_nested(self, n_users, inner_trans_func):
     """
     Store ``n_users`` users, delegate to ``inner_trans_func`` for another
     batch, then raise so the whole transaction fails.  Whether the inner
     function succeeds or raises, every change must be rolled back.

     :param n_users: number of users stored here and passed to ``inner_trans_func``
     :param inner_trans_func: either ``_store_users_happy_flow`` or ``_store_users_raises_exception``
     """
     for number in range(n_users):
         TestFactory.create_user('test_user_nested' + str(number), 'pass', '*****@*****.**', True, 'test')
     inner_trans_func(n_users)
     raise Exception("This is just so transactional kicks in and a rollback should be done.")
 def transactional_setup_method(self):
     """
     Create the user/project fixtures and instantiate the services
     (workflow, burst, operation and flow) exercised by the tests.
     """
     user = TestFactory.create_user()
     self.test_user = user
     self.test_project = TestFactory.create_project(user)
     self.workflow_service = WorkflowService()
     self.burst_service = BurstService()
     self.operation_service = OperationService()
     self.flow_service = FlowService()
    def transactional_setup_method(self):
        """
        Build the shared fixtures: reuse the DatatypesFactory project and
        user, import a CFF data-set and fetch a connectivity from it.
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        # The CFF import must have produced at least one connectivity.
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        assert self.connectivity is not None
    def _create_datatype_group(self):
        """
        Create a fresh project containing one DataTypeGroup with two
        DataTypes, and return (project, group id, first DT, second DT).
        """
        project = TestFactory.create_project(self.test_user, "NewProject")

        operations_count = dao.get_filtered_operations(project.id, None, is_count=True)
        assert 0 == operations_count, "There should be no operation."

        datatypes, op_group_id = TestFactory.create_group(self.test_user, project)
        datatype_group = dao.get_datatypegroup_by_op_group_id(op_group_id)

        return project, datatype_group.id, datatypes[0], datatypes[1]
    def transactional_setup_method(self):
        """
        Instantiate the services and helpers under test, create the
        user/project fixtures and build the relevant/full datatype filters.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()

        user = TestFactory.create_user()
        self.test_user = user
        self.test_project = TestFactory.create_project(user, "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.FULL_VIEW)
    def transactional_setup_method(self):
        """
        Reuse the DatatypesFactory project and user, then import the shelf
        face surface object needed by the tests.
        """
        self.factory = DatatypesFactory()
        self.test_project = self.factory.get_project()
        self.test_user = self.factory.get_user()

        ## Import Shelf Face Object
        face_path = os.path.join(os.path.dirname(tvb_data.obj.__file__), 'face_surface.obj')
        TestFactory.import_surface_obj(self.test_user, self.test_project, face_path, FACE)
    def test_launch_internal(self):
        """
        Launching SensorsViewer on imported internal sensors must return a
        dictionary containing every expected key.
        """
        sensors_path = os.path.join(os.path.dirname(tvb_data.sensors.__file__), 'seeg_39.txt.bz2')
        TestFactory.import_sensors(self.test_user, self.test_project, sensors_path,
                                   Sensors_Importer.INTERNAL_SENSORS)
        internal_sensors = TestFactory.get_entity(self.test_project, SensorsInternal())

        viewer = SensorsViewer()
        viewer.current_project_id = self.test_project.id

        self.assert_compliant_dictionary(self.EXPECTED_KEYS_INTERNAL, viewer.launch(internal_sensors))
 def test_viewall_valid_data(self):
     """
     Create a bunch of projects and check that they are returned correctly.
     """
     project1 = TestFactory.create_project(self.test_user, 'prj1')
     TestFactory.create_project(self.test_user, 'prj2')
     TestFactory.create_project(self.test_user, 'prj3')
     result = self.project_c.viewall(selected_project_id=project1.id)
     projects_list = result['projectsList']
     # The extra 'Test' project presumably comes from the class set-up — confirm.
     # Legacy set([...]) builder (kept for Python 2.6) replaced by a set comprehension.
     assert {prj.name for prj in projects_list} == {'prj1', 'prj2', 'prj3', 'Test'}
     assert result['page_number'] == 1
     assert result[common.KEY_PROJECT].name == 'prj1'
 def test_retrieve_1project_3usr(self):
     """
     A project with one admin and two members must be returned exactly
     once when retrieving projects for any of the three users.
     """
     member1 = TestFactory.create_user("member1")
     member2 = TestFactory.create_user("member2")
     TestFactory.create_project(self.test_user, 'Testproject', users=[member1.id, member2.id])
     # Admin first, then each member — every lookup yields the single project.
     for user_id in (self.test_user.id, member1.id, member2.id):
         projects = self.project_service.retrieve_projects_for_user(user_id, 1)[0]
         assert len(projects) == 1, "Projects not retrieved properly!"
    def test_displayresultfigures(self):
        """
        Figures stored under one session name must all be returned by
        ``displayresultfigures`` grouped under that session.
        """
        figure1 = TestFactory.create_figure(self.operation.id, self.test_user.id,
                                            self.test_project.id, name="figure1",
                                            path="path-to-figure1", session_name="test")
        figure2 = TestFactory.create_figure(self.operation.id, self.test_user.id,
                                            self.test_project.id, name="figure2",
                                            path="path-to-figure2", session_name="test")

        result_dict = self.figure_c.displayresultfigures()
        figures = result_dict['selected_sessions_data']['test']
        # Set comprehension instead of the legacy set([...]) builder.
        assert {fig.id for fig in figures} == {figure1.id, figure2.id}
 def test_validate_valid(self):
     """
     Pass a valid user and test that it is actually validated.
     """
     # The validating user must be an administrator stored in the session.
     self.test_user.role = "ADMINISTRATOR"
     self.test_user = dao.store_entity(self.test_user)
     cherrypy.session[common.KEY_USER] = self.test_user
     # NOTE(review): the created username is obfuscated ("******") but looked
     # up as "to_validate" below — confirm against the original source.
     TestFactory.create_user(username="******", validated=False)
     user_before_validation = dao.get_user_by_name("to_validate")
     assert not user_before_validation.validated
     # A successful validation redirects to the main page.
     self._expect_redirect('/tvb', self.user_c.validate, user_before_validation.username)
     user_after_validation = dao.get_user_by_id(user_before_validation.id)
     assert user_after_validation.validated, "User should be validated."
     assert cherrypy.session[common.KEY_MESSAGE_TYPE] == common.TYPE_INFO
 def test_load_burst(self):
     """
     Test that load_burst works properly. NOTE: this method is also tested
     in the actual burst launch tests. This is just a basic test to verify
     that the simulator interface is loaded properly.
     """
     burst_config = TestFactory.store_burst(self.test_project.id)
     loaded_burst = self.burst_service.load_burst(burst_config.id)[0]
     # A burst stored without a simulator configuration loads with an empty one.
     assert loaded_burst.simulator_configuration == {}, "No simulator configuration should have been loaded"
     assert burst_config.fk_project == loaded_burst.fk_project, "Loaded burst different from original one."
     # A burst stored WITH a configuration loads it back unchanged.
     burst_config = TestFactory.store_burst(self.test_project.id, simulator_config={"test": "test"})
     loaded_burst, _ = self.burst_service.load_burst(burst_config.id)
     assert loaded_burst.simulator_configuration == {"test": "test"}, "different burst loaded"
     assert burst_config.fk_project == loaded_burst.fk_project, "Loaded burst different from original one."
# Beispiel #31 (scraped example separator)
    def transactional_setup_method(self):
        """
        Start each test from an empty state: fresh services, a new user,
        the default project imported for that user, plus an operation and
        an adapter instance.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

        self.delete_project_folders()
        # Verify the database really is empty before importing anything.
        assert 0 == self.count_all_entities(DataType), "There should be no data type in DB"
        assert 0 == self.count_all_entities(Project)

        self.test_project = TestFactory.import_default_project(self.test_user)
        self.operation = TestFactory.create_operation(test_user=self.test_user,
                                                      test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter()
# Beispiel #32 (scraped example separator)
    def transactional_setup_method(self):
        """
        Create a user and project, import a 76-node connectivity so one is
        present in the DB, then build an adapter for every BCT algorithm.
        """
        self.test_user = TestFactory.create_user("BCT_User")
        self.test_project = TestFactory.create_project(self.test_user, "BCT-Project")
        # Make sure a Connectivity is in the DB before the BCT adapters run.
        connectivity_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_76.zip')
        self.connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                                                connectivity_path)

        bct_algorithms = dao.get_generic_entity(Algorithm, 'Brain Connectivity Toolbox', 'group_description')
        assert bct_algorithms is not None
        assert len(bct_algorithms) > 5

        self.bct_adapters = [ABCAdapter.build_adapter(algorithm) for algorithm in bct_algorithms]
# Beispiel #33 (scraped example separator)
 def test_import_surf_zip(self):
     """
     Importing the skull zip as a cortical surface yields the expected
     vertex/triangle counts and a surface valid for simulations.
     """
     surface = TestFactory.import_surface_zip(self.test_user, self.test_project, self.surf_skull,
                                              SurfaceTypesEnum.CORTICAL_SURFACE, same_process=False)
     assert surface.number_of_vertices == 4096
     assert surface.number_of_triangles == 8188
     assert surface.valid_for_simulations
# Beispiel #34 (scraped example separator)
 def test_edit_entity_forget_commit(self):
     """
     If a dao mutation forgets to commit explicitly, the commit must still
     happen automatically for new/update/delete operations.
     """
     user_id = TestFactory.create_user('username', 'password', 'mail', True, 'role').id
     self._dao_change_user_forget_commit(user_id, 'new_name')
     renamed_user = dao.get_user_by_id(user_id)
     assert renamed_user.username == 'new_name', \
         "User should be edited but it is not. Expected 'new_name' got %s" % renamed_user.username
# Beispiel #35 (scraped example separator)
 def test_clone_burst_configuration(self):
     """
     Cloning a stored burst keeps all major attributes (including the
     name) but resets the id of the clone to None.
     """
     original_burst = TestFactory.store_burst(self.test_project.id)
     cloned_burst = original_burst.clone()
     self._compare_bursts(original_burst, cloned_burst)
     assert cloned_burst.name == original_burst.name, 'Cloned burst should have the same name'
     assert cloned_burst.id is None, 'id should be none for cloned entry.'
# Beispiel #36 (scraped example separator)
 def _asynch_launch_simple_op(self):
     """
     Prepare a simple TestAdapter1 operation and submit it to the cluster
     scheduler; return the prepared operation.
     """
     adapter = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
     view_model = TestModel()
     view_model.test1_val1 = 5
     view_model.test1_val2 = 6
     algo = adapter.stored_adapter
     operation = self.operation_service.prepare_operation(self.test_user.id, self.test_project, algo,
                                                          view_model=view_model)
     # Private service API: schedules the operation for execution.
     self.operation_service._send_to_cluster(operation, adapter)
     return operation
# Beispiel #37 (scraped example separator)
    def test_happy_flow_launch(self, connectivity_index_factory,
                               operation_factory):
        """
        Test that launching a simulation from UI works: a synchronous run
        stores a TimeSeriesRegion of the expected shape.
        """
        model = SimulatorAdapterModel()
        model.connectivity = connectivity_index_factory(
            self.CONNECTIVITY_NODES).gid
        model.simulation_length = 32

        TestFactory.launch_synchronously(self.test_user, self.test_project,
                                         self.simulator_adapter, model)
        sim_result = dao.get_generic_entity(TimeSeriesRegionIndex,
                                            'TimeSeriesRegion',
                                            'time_series_type')[0]
        # Expected lengths: 32 time points and CONNECTIVITY_NODES regions;
        # the two singleton dimensions presumably correspond to the state
        # variable and mode axes — confirm against the monitor defaults.
        assert (sim_result.data_length_1d, sim_result.data_length_2d,
                sim_result.data_length_3d,
                sim_result.data_length_4d) == (32, 1, self.CONNECTIVITY_NODES,
                                               1)
    def _create_operations_with_inputs(self, is_group_parent=False):
        """
        Method used for creating a complex tree of operations.

        If 'is_group_parent' is True then a new group will be created and one of its
        entries will be used as input for the returned operations.

        :returns: (operations, datatype group id) when ``is_group_parent`` is True,
                  otherwise (operations, input datatype gid)
        """
        group_dts, root_op_group_id = TestFactory.create_group(
            self.test_user, self.test_project)
        if is_group_parent:
            datatype_gid = group_dts[0].gid
        else:
            datatype_gid = TestProjectService._create_value_wrapper(
                self.test_user, self.test_project)[1]

        parameters = json.dumps({"param_name": datatype_gid})

        # Four standalone operations, indexes 1 and 3 hidden, all referencing
        # the chosen input datatype through their parameters.
        ops = []
        for i in range(4):
            ops.append(
                TestFactory.create_operation(test_user=self.test_user,
                                             test_project=self.test_project))
            if i in [1, 3]:
                ops[i].visible = False
            ops[i].parameters = parameters
            ops[i] = dao.store_entity(ops[i])

        # groups: two grouped operations, the second hidden, same input.
        _, ops_group = TestFactory.create_group(self.test_user,
                                                self.test_project)
        ops_group = dao.get_operations_in_group(ops_group)
        assert 2 == len(ops_group)
        ops_group[0].parameters = parameters
        ops_group[0] = dao.store_entity(ops_group[0])
        ops_group[1].visible = False
        ops_group[1].parameters = parameters
        ops_group[1] = dao.store_entity(ops_group[1])

        ops.extend(ops_group)
        if is_group_parent:
            dt_group = dao.get_datatypegroup_by_op_group_id(root_op_group_id)
            return ops, dt_group.id
        return ops, datatype_gid
 def test_adapter_memory(self, test_adapter_factory):
     """
     The HDD-required test adapter must report a required memory size of 42
     through get_required_memory_size.
     """
     test_adapter_factory(adapter_class=TestAdapterHDDRequired)
     adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3",
                                          "TestAdapterHDDRequired")
     assert adapter.get_required_memory_size() == 42
# Beispiel #40 (scraped example separator)
 def test_create_project_happy_flow(self):
     """
     Standard flow for creating a new project: exactly one project is
     stored for the owner, the members are attached and the project
     folder appears on disk.
     """
     user1 = TestFactory.create_user('test_user1')
     user2 = TestFactory.create_user('test_user2')
     initial_projects = dao.get_projects_for_user(self.test_user.id)
     assert len(initial_projects) == 0, "Database reset probably failed!"
     TestFactory.create_project(self.test_user, 'test_project', users=[user1.id, user2.id])
     resulting_projects = dao.get_projects_for_user(self.test_user.id)
     assert len(resulting_projects) == 1, "Project with valid data not inserted!"
     project = resulting_projects[0]
     # NOTE(review): these checks only run when the stored name matches;
     # if it never matches they are silently skipped — consider asserting the name.
     if project.name == "test_project":
         assert project.description == "description", "Description do no match"
         users_for_project = dao.get_members_of_project(project.id)
         for user in users_for_project:
             assert user.id in [user1.id, user2.id], "Users not stored properly."
     assert os.path.exists(os.path.join(TvbProfile.current.TVB_STORAGE, FilesHelper.PROJECTS_FOLDER,
                                                 "test_project")), "Folder for project was not created"
# Beispiel #41 (scraped example separator)
 def _launch_test_algo_on_cluster(self, **data):
     """
     Prepare TestAdapter1 operations from ``data`` and submit them to the
     cluster scheduler; return the prepared operations.

     :param data: keyword arguments forwarded to prepare_operations
     """
     adapter = TestFactory.create_adapter(
         'tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
     algo = adapter.stored_adapter
     algo_category = dao.get_category_by_id(algo.fk_category)
     operations, _ = self.operation_service.prepare_operations(
         self.test_user.id, self.test_project, algo, algo_category, {},
         **data)
     # Private service API: schedules the prepared operations for execution.
     self.operation_service._send_to_cluster(operations, adapter)
     return operations
# Beispiel #42 (scraped example separator)
    def test_retrieve_project_full(self, dummy_datatype_index_factory):
        """
        Tests full project information is retrieved by method `ProjectService.retrieve_project_full(...)`
        """

        project = TestFactory.create_project(self.test_user)
        operation = TestFactory.create_operation(test_user=self.test_user,
                                                 test_project=project)

        # Three datatypes, all attached to the same operation.
        dummy_datatype_index_factory(project=project, operation=operation)
        dummy_datatype_index_factory(project=project, operation=operation)
        dummy_datatype_index_factory(project=project, operation=operation)

        _, ops_nr, operations, pages_no = self.project_service.retrieve_project_full(
            project.id)
        assert ops_nr == 1, "DataType Factory should only use one operation to store all it's datatypes."
        assert pages_no == 1, "DataType Factory should only use one operation to store all it's datatypes."
        # All three datatypes must show up as results of that single operation.
        resulted_dts = operations[0]['results']
        assert len(resulted_dts) == 3, "3 datatypes should be created."
 def test_retrieve_1project_3usr(self):
     """
     One user as admin, two users as members, getting projects for admin and for any of
     the members should return one.
     """
     first_member = TestFactory.create_user("member1")
     second_member = TestFactory.create_user("member2")
     TestFactory.create_project(self.test_user, 'Testproject', users=[first_member.id, second_member.id])
     # Admin and both members must each see exactly one project on page 1.
     for user_id in (self.test_user.id, first_member.id, second_member.id):
         projects = self.project_service.retrieve_projects_for_user(user_id, 1)[0]
         assert len(projects) == 1, "Projects not retrieved properly!"
Beispiel #44
0
 def test_get_available_bursts_happy(self):
     """
     Test that all the correct burst are returned for the given project.
     """
     second_project = dao.store_entity(Project("second_test_proj", self.test_user.id, "description"))
     first_burst_ids = [TestFactory.store_burst(self.test_project.id).id for _ in range(4)]
     second_burst_ids = [TestFactory.store_burst(second_project.id).id for _ in range(3)]
     returned_first = [burst.id for burst in self.burst_service.get_available_bursts(self.test_project.id)]
     returned_second = [burst.id for burst in self.burst_service.get_available_bursts(second_project.id)]
     # Counts and exact id sets must match per project.
     assert len(first_burst_ids) == len(returned_first), \
         "Incorrect bursts retrieved for project %s." % self.test_project
     assert len(second_burst_ids) == len(returned_second), \
         "Incorrect bursts retrieved for project %s." % second_project
     assert set(second_burst_ids) == set(returned_second), \
         "Incorrect bursts retrieved for project %s." % second_project
     assert set(first_burst_ids) == set(returned_first), \
         "Incorrect bursts retrieved for project %s." % self.test_project
 def test_clone_burst_configuration(self):
     """
     Test that all the major attributes are the same after a clone burst but the
     id of the cloned one is None.
     """
     original_burst = TestFactory.store_burst(self.test_project.id)
     copied_burst = original_burst.clone()
     self._compare_bursts(original_burst, copied_burst)
     assert original_burst.selected_tab == copied_burst.selected_tab, "Selected tabs not equal for bursts."
     assert len(original_burst.tabs) == len(copied_burst.tabs), "Tabs not equal for bursts."
     assert copied_burst.id is None, 'id should be none for cloned entry.'
Beispiel #46
0
    def _import(self, import_file_path=None, expected_result_class=StructuralMRIIndex, connectivity_gid=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        :param expected_result_class: index class the import is expected to produce
        :param connectivity_gid: optional connectivity GID forwarded to the importer
        :return: the imported datatype, loaded by GID
        """
        view_model = NIFTIImporterModel()
        view_model.data_file = import_file_path
        view_model.mappings_file = self.TXT_FILE
        view_model.apply_corrections = True
        view_model.connectivity = connectivity_gid
        view_model.data_subject = "Bla Bla"

        TestFactory.launch_importer(NIFTIImporter, view_model, self.test_user, self.test_project, False)

        dts, count = dao.get_values_of_datatype(self.test_project.id, expected_result_class, None)
        # Bug fix: was `assert 1, count == "..."`, which always passed (asserted the constant 1).
        assert count == 1, "Project should contain only one data type."

        result = load_entity_by_gid(dts[0][2])
        assert result is not None, "Result should not be none"
        return result
Beispiel #47
0
    def test_launch_operation_hdd_with_space(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        hdd_adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.dummy_adapter3",
                                                 "DummyAdapterHDDRequired")
        view_model = hdd_adapter.get_view_model()()

        # Allow exactly as much disk space as the adapter declares it requires.
        TvbProfile.current.MAX_DISK_SPACE = float(hdd_adapter.get_required_disk_size(view_model))
        self.operation_service.initiate_operation(self.test_user, self.test_project, hdd_adapter,
                                                  model_view=view_model)
        self._assert_stored_ddti()
Beispiel #48
0
    def test_rename_burst(self):
        """
        Renaming a burst through the controller should persist the new name.
        """
        new_name = "Test Burst Configuration 2"
        operation = TestFactory.create_operation()
        burst_config = TestFactory.store_burst(self.test_project.id, operation)
        stored_bursts = dao.get_bursts_for_project(self.test_project.id)
        self.sess_mock['burst_id'] = str(stored_bursts[0].id)
        self.sess_mock['burst_name'] = new_name

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_BURST_CONFIG, self.session_stored_simulator)
            common.add2session(common.KEY_BURST_CONFIG, burst_config)
            result = self.simulator_controller.rename_burst(stored_bursts[0].id, new_name)

        assert result == '{"success": "Simulation successfully renamed!"}', \
            "Some error happened at renaming, probably because of invalid new name."
        renamed_burst = dao.get_bursts_for_project(self.test_project.id)[0]
        assert renamed_burst.name == new_name, "Name wasn't actually changed."
Beispiel #49
0
 def test_edit_project_unexisting(self):
     """
     Trying to edit an un-existing project.
     """
     existing_project = TestFactory.create_project(self.test_user, 'test_proj')
     self.storage_interface.get_project_folder(existing_project.name)
     assert len(dao.get_projects_for_user(self.test_user.id)) == 1, "Database initialization probably failed!"
     edit_data = dict(name="test_project", description="test_description", users=[])
     # Project id 99 does not exist, so the service must raise.
     with pytest.raises(ProjectServiceException):
         self.project_service.store_project(self.test_user, False, 99, **edit_data)
Beispiel #50
0
    def test_import_quads_no_normals(self):
        """
        Test that import works with a file which contains quads and no normals
        """
        face_index = TestFactory.import_surface_obj(self.test_user, self.test_project, self.face,
                                                    SurfaceTypesEnum.FACE_SURFACE, False)

        loaded_surface = h5.load_from_index(face_index)
        assert len(loaded_surface.vertex_normals) == 8614
        assert len(loaded_surface.vertices) == 8614
        assert len(loaded_surface.triangles) == 17224
Beispiel #51
0
    def test_import_simple_with_normals(self):
        """
        Test that import works with an OBJ file which includes normals
        """
        torus_index = TestFactory.import_surface_obj(self.test_user, self.test_project, self.torus,
                                                     SurfaceTypesEnum.FACE_SURFACE, False)
        assert torus_index.number_of_vertices == 441
        assert torus_index.number_of_triangles == 800

        torus_surface = h5.load_from_index(torus_index)
        assert len(torus_surface.vertex_normals) == 441
Beispiel #52
0
    def _import_csv_test_connectivity(self, reference_connectivity_gid, subject):
        """
        Launch the CSV connectivity importer over the Toronto DTI pipeline sample data.
        """
        # First prepare input data:
        data_dir = path.abspath(path.dirname(tvb_data.__file__))
        toronto_dir = path.join(data_dir, 'dti_pipeline_toronto')
        weights = path.join(toronto_dir, 'output_ConnectionCapacityMatrix.csv')
        tracts = path.join(toronto_dir, 'output_ConnectionDistanceMatrix.csv')
        # Work on temporary copies so the original sample files stay untouched.
        weights_copy = weights + '.tmp'
        tracts_copy = tracts + '.tmp'
        self.storage_interface.copy_file(weights, weights_copy)
        self.storage_interface.copy_file(tracts, tracts_copy)

        view_model = CSVConnectivityImporterModel()
        view_model.weights = weights_copy
        view_model.tracts = tracts_copy
        view_model.data_subject = subject
        view_model.input_data = reference_connectivity_gid
        TestFactory.launch_importer(CSVConnectivityImporter, view_model,
                                    self.test_user, self.test_project, False)
 def test_retrieve_projects_for_user(self):
     """
     Test for retrieving the projects for a given user. One page only.

     Creates three projects for the tested user and one for another user,
     then checks only the first three are returned.
     """
     initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
     assert len(initial_projects) == 0, "Database was not reset properly!"
     TestFactory.create_project(self.test_user, 'test_proj')
     TestFactory.create_project(self.test_user, 'test_proj1')
     TestFactory.create_project(self.test_user, 'test_proj2')
     user1 = TestFactory.create_user('another_user')
     TestFactory.create_project(user1, 'test_proj3')
     projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
     assert len(projects) == 3, "Projects not retrieved properly!"
     for project in projects:
         # Bug fix: the excluded project is named 'test_proj3'; the old check
         # compared against 'test_project3' and could never fail.
         assert project.name != "test_proj3", "This project should not have been retrieved"
Beispiel #54
0
    def test_get_filtered_by_column(self):
        """
        Test the filter function when retrieving dataTypes with a filter
        after a column from a class specific table (e.g. DATA_arraywrapper).
        """
        operation_1 = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        operation_2 = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)

        one_dim_array = numpy.arange(5)
        two_dim_array = numpy.array([[1, 2], [2, 3], [1, 4]])
        self._store_float_array(one_dim_array, "John Doe 1", operation_1.id)
        self._store_float_array(one_dim_array, "John Doe 2", operation_1.id)
        self._store_float_array(two_dim_array, "John Doe 3", operation_2.id)

        count = self.flow_service.get_available_datatypes(self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1]
        # Bug fix: was `assert count, 3 == "..."`, which always passed; compare for real.
        assert count == 3, "Problems with inserting data"
        first_filter = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'], operations=["=="], values=[1])
        count = self.flow_service.get_available_datatypes(self.test_project.id,
                                                          "tvb.datatypes.arrays.MappedArray", first_filter)[1]
        assert count == 2, "Data was not filtered"

        second_filter = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'], operations=["=="], values=[2])
        filtered_data = self.flow_service.get_available_datatypes(self.test_project.id,
                                                                  "tvb.datatypes.arrays.MappedArray", second_filter)[0]
        assert len(filtered_data) == 1, "Data was not filtered"
        assert filtered_data[0][3] == "John Doe 3"

        third_filter = FilterChain(fields=[FilterChain.datatype + '._length_1d'], operations=["=="], values=[3])
        filtered_data = self.flow_service.get_available_datatypes(self.test_project.id,
                                                                  "tvb.datatypes.arrays.MappedArray", third_filter)[0]
        assert len(filtered_data) == 1, "Data was not filtered correct"
        assert filtered_data[0][3] == "John Doe 3"
        # Best-effort cleanup of temporary export files; failures here are not test failures.
        try:
            if os.path.exists('One_dim.txt'):
                os.remove('One_dim.txt')
            if os.path.exists('Two_dim.txt'):
                os.remove('Two_dim.txt')
            if os.path.exists('One_dim-1.txt'):
                os.remove('One_dim-1.txt')
        except Exception:
            pass
Beispiel #55
0
    def test_load_burst_only(self):
        """
        Loading a burst read-only should set the simulator-load flag, leave the
        copy flag unset and land on the PSE setup form.
        """
        conn_zip = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        connectivity = TestFactory.import_zip_connectivity(self.test_user, self.test_project, conn_zip, "John")

        operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        burst_config = BurstConfiguration(self.test_project.id)
        burst_config.fk_simulation = operation.id
        burst_config.simulator_gid = self.session_stored_simulator.gid.hex
        burst_config.name = 'Test_Burst'
        burst_config = dao.store_entity(burst_config)

        self.sess_mock['burst_id'] = str(burst_config.id)
        self.sess_mock['connectivity'] = connectivity.gid
        self.sess_mock['conduction_speed'] = "3.0"
        self.sess_mock['coupling'] = "Sigmoidal"

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_connectivity(**self.sess_mock._data)
            self.simulator_controller.set_stimulus(**self.sess_mock._data)

        storage_path = FilesHelper().get_project_folder(self.test_project, str(operation.id))
        SimulatorSerializer().serialize_simulator(self.session_stored_simulator, None, storage_path)

        with patch('cherrypy.session', self.sess_mock, create=True):
            self.simulator_controller.load_burst_read_only(str(burst_config.id))
            is_simulator_load = common.get_from_session(KEY_IS_SIMULATOR_LOAD)
            is_simulator_copy = common.get_from_session(KEY_IS_SIMULATOR_COPY)
            last_loaded_form_url = common.get_from_session(KEY_LAST_LOADED_FORM_URL)

        assert is_simulator_load, "Simulator Load Flag should be True!"
        assert not is_simulator_copy, "Simulator Copy Flag should be False!"
        assert last_loaded_form_url == '/burst/setup_pse', "Incorrect last form URL!"
Beispiel #56
0
    def test_get_linkable_projects(self):
        """
        Test for retrieving the projects for a given user.
        """
        assert len(self.project_service.retrieve_projects_for_user(self.test_user.id)[0]) == 0, \
            "Database was not reset!"
        user1 = TestFactory.create_user("another_user")
        # First three projects belong to test_user, the last one to another user.
        all_projects = [TestFactory.create_project(self.test_user if idx < 3 else user1, 'test_proj' + str(idx))
                        for idx in range(4)]
        operation = TestFactory.create_operation(test_user=self.test_user, test_project=all_projects[0])
        datatype = dao.store_entity(model_datatype.DataType(module="test_data", subject="subj1",
                                                            state="test_state", operation_id=operation.id))

        linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id, str(datatype.id))[0]

        assert len(linkable) == 2, "Wrong count of link-able projects!"
        linkable_names = [prj.name for prj in linkable]
        assert all_projects[1].name in linkable_names
        assert all_projects[2].name in linkable_names
        assert all_projects[3].name not in linkable_names
 def test_viewoperations(self):
     """
     Test the viewoperations from projectcontroller.
     """
     stored_op = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
     result_dict = self.project_c.viewoperations(self.test_project.id)
     ops = result_dict['operationsList']
     assert len(ops) == 1
     assert ops[0]['id'] == str(stored_op.id)
     assert 'no_filter_selected' in result_dict
     assert 'total_op_count' in result_dict
Beispiel #58
0
    def test_set_connectivity(self):
        """
        Setting the connectivity fragment should update connectivity, conduction
        speed and coupling on the session-stored simulator.
        """
        conn_zip = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project, conn_zip, "John")
        connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex)

        self.sess_mock['_connectivity'] = connectivity.gid
        self.sess_mock['_conduction_speed'] = "3.0"
        self.sess_mock['_coupling'] = "Sigmoidal"

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            self.simulator_controller.set_connectivity(**self.sess_mock._data)

        simulator = self.session_stored_simulator
        assert simulator.connectivity.hex == connectivity.gid, "Connectivity was not set correctly."
        assert simulator.conduction_speed == 3.0, "Conduction speed was not set correctly."
        assert isinstance(simulator.coupling, Sigmoidal), "Coupling was not set correctly."
Beispiel #59
0
 def test_get_simple_adapter_interface(self, test_adapter_factory):
     """
     The flow controller should expose TestAdapter1's form with the submitted values.
     """
     algo = test_adapter_factory()
     submitted_form = TestAdapter1Form()
     adapter = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter1', 'TestAdapter1')
     adapter.submit_form(submitted_form)
     result = self.flow_c.get_simple_adapter_interface(algo.id)
     expected_form = adapter.get_form()
     rendered_form = result['adapter_form']['adapter_form']
     assert isinstance(result['adapter_form'], dict)
     assert isinstance(rendered_form, TestAdapter1Form)
     assert rendered_form.test1_val1.value == expected_form.test1_val1.value
     assert rendered_form.test1_val2.value == expected_form.test1_val2.value
    def test_reset_simulator_configuration(self):
        """
        After advancing past the first fragment, a reset must bring the wizard
        back to the first fragment.
        """
        conn_zip = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project, conn_zip, "John")
        connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex)

        self.sess_mock['connectivity'] = connectivity.gid
        self.sess_mock['conduction_speed'] = "3.0"
        self.sess_mock['coupling'] = "Sigmoidal"

        with patch('cherrypy.session', self.sess_mock, create=True):
            common.add2session(common.KEY_SIMULATOR_CONFIG, self.session_stored_simulator)
            rules = self.simulator_controller.set_connectivity(**self.sess_mock._data)

        assert rules['renderer'].is_first_fragment is False, \
            "Page should have advanced past the first fragment."

        with patch('cherrypy.session', self.sess_mock, create=True):
            rules = self.simulator_controller.reset_simulator_configuration()

        assert rules['renderer'].is_first_fragment is True, \
            "Page should be set to the first fragment."