def test_workflow_dynamic_params(self):
        """
        Check that dynamic parameters are forwarded correctly between two
        workflow steps:
                  step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
                  step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
        The first adapter produces a tvb.datatypes.datatype1.Datatype1 that the
        second receives as a dynamic workflow parameter. Execution of both
        steps is confirmed via the created operations and stored dataTypes.
        """
        step_one = TestFactory.create_workflow_step(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapter1",
            step_index=1, static_kwargs={"test1_val1": 1, "test1_val2": 1})
        step_two = TestFactory.create_workflow_step(
            "tvb.tests.framework.adapters.testadapter3", "TestAdapter3",
            step_index=2,
            dynamic_kwargs={"test": {wf_cfg.DATATYPE_INDEX_KEY: 0,
                                     wf_cfg.STEP_INDEX_KEY: 1}})

        self.__create_complex_workflow([step_one, step_two])
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 3, "DataType from all step were not stored."
        for result_row in stored_datatypes:
            assert result_row.type in ['Datatype1', 'Datatype2'], "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."
 def test_sync_burst_launch(self):
     """
     A full test for launching a burst.

     First create the workflow steps and launch the burst. Then check that the
     only operation created is for the first adapter from the portlet (the
     second is viewed as a visualizer). After that load the burst and check
     that the visualizer and analyzer are loaded in the corresponding tab and
     that all the parameters are still the same. Finally check that burst
     status updates corresponding to final operation status.
     """
     loaded_burst, workflow_step_list = self._prepare_and_launch_sync_burst()
     finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
     self.assertEqual(finished, 1, "One operations should have been generated for this burst.")
     self.assertEqual(started, 0, "No operations should remain started since workflow was launched synchronous.")
     self.assertEqual(error, 0, "No operations should return error status.")
     self.assertTrue(loaded_burst.tabs[0].portlets[0] is not None, "Portlet not loaded from config!")
     portlet_config = loaded_burst.tabs[0].portlets[0]
     analyzers = portlet_config.analyzers
     self.assertEqual(len(analyzers), 0, "Only have 'simulator' and a visualizer. No analyzers should be loaded.")
     visualizer = portlet_config.visualizer
     self.assertTrue(visualizer is not None, "Visualizer should not be none.")
     self.assertEqual(visualizer.fk_algorithm, workflow_step_list[0].fk_algorithm,
                      "Different ids after burst load for visualizer.")
     self.assertEqual(visualizer.static_param, workflow_step_list[0].static_param,
                      "Different static params after burst load for visualizer.")
     # Fixed copy-pasted failure message: this assertion compares the dynamic
     # (not static) parameters.
     self.assertEqual(visualizer.dynamic_param, workflow_step_list[0].dynamic_param,
                      "Different dynamic params after burst load for visualizer.")
# Example n. 3
 def store_project(self, current_user, is_create, selected_id, **data):
     """
     Create a new Project entity (when ``is_create``) or update the one
     identified by ``selected_id``.

     :param current_user: user performing the operation; owner of a new project
     :param is_create: True for creation, False for update
     :param selected_id: id of the edited project (uniqueness check / update)
     :param data: form values; expects at least "name" and "description"
     :raises ProjectServiceException: invalid name, running operations, or
         DB retrieval failure
     :raises formencode.Invalid: when the chosen name is already taken
     """
     # Validate Unique Name
     new_name = data["name"]
     if len(new_name) < 1:
         raise ProjectServiceException("Invalid project name!")
     projects_no = dao.count_projects_for_name(new_name, selected_id)
     if projects_no > 0:
         err = {'name': 'Please choose another name, this one is used!'}
         raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
     # Element 1 of the counters tuple is the started-operations count
     # (other call sites in this file unpack finished/started/error/...).
     started_operations = dao.get_operation_numbers(selected_id)[1]
     if started_operations > 0:
         raise ProjectServiceException("A project can not be renamed while operations are still running!")
     if is_create:
         current_proj = model.Project(new_name, current_user.id, data["description"])
         self.structure_helper.get_project_folder(current_proj)
     else:
         try:
             current_proj = dao.get_project_by_id(selected_id)
         # Fixed Python-2-only ``except Exception, excep`` syntax, which is
         # inconsistent with the other store_project variants in this file.
         except Exception as excep:
             self.logger.exception("An error has occurred!")
             raise ProjectServiceException(str(excep))
         if current_proj.name != new_name:
             # Renaming: move the on-disk project folder to the new name.
             self.structure_helper.rename_project_structure(current_proj.name, new_name)
         current_proj.name = new_name
         current_proj.description = data["description"]
    def test_workflow_generation(self):
        """
        Check that a two-step workflow with only static parameters is created
        and executed:
                  step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        The first adapter returns nothing and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance; execution is verified
        through the created operations and the stored dataType.
        """
        steps = [TestFactory.create_workflow_step(
                     "tvb.tests.framework.adapters.testadapter2", "TestAdapter2",
                     step_index=1, static_kwargs={"test2": 2}),
                 TestFactory.create_workflow_step(
                     "tvb.tests.framework.adapters.testadapter1", "TestAdapter1",
                     step_index=2, static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        self.__create_complex_workflow(steps)

        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 2, "DataType from second step was not stored."
        for stored in stored_datatypes:
            assert stored.type == 'Datatype1', "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didnt start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didnt finish."
        assert error == 0, "Some operations finished with error status."
    def store_project(self, current_user, is_create, selected_id, **data):
        """
        Create a new Project entity (when ``is_create``) or update the one
        identified by ``selected_id``, then persist its share settings.

        :param current_user: user performing the edit; owner of a new project
        :param is_create: True for creation, False for update
        :param selected_id: id of the edited project (uniqueness check / update)
        :param data: form values; expects "name", "description" and "users",
            optionally "visited_pages"
        :returns: the stored Project, re-read so lazy attributes are loaded
        :raises ProjectServiceException: invalid name, running operations, or
            DB retrieval failure
        :raises formencode.Invalid: when the chosen name is already taken
        """
        # Validate Unique Name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
        # Element 1 of the counters tuple is the started-operations count
        # (other call sites in this file unpack finished/started/error/...).
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException("A project can not be renamed while operations are still running!")
        if is_create:
            current_proj = model.Project(new_name, current_user.id, data["description"])
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                # Renaming: move the on-disk project folder to the new name.
                self.structure_helper.rename_project_structure(current_proj.name, new_name)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        # Drop membership for every user page the editor visited; members
        # selected in the form are re-added below.  Presumably retrieve_all_users
        # excludes prj_admin from the result — TODO confirm against UserService.
        for page in visited_pages:
            members = UserService.retrieve_all_users(prj_admin, int(page))[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)
        selected_user_ids = data["users"]
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:' + current_user.username)
        return current_proj
 def retrieve_projects_for_user(self, user_id, current_page=1):
     """
     Return a list with all Projects visible for current user.

     :param user_id: id of the user whose projects are listed
     :param current_page: 1-based page index
     :returns: tuple (projects on the requested page, total number of pages)
     """
     start_idx = PROJECTS_PAGE_SIZE * (current_page - 1)
     total = dao.get_projects_for_user(user_id, is_count=True)
     # NOTE(review): despite the name, this looks like a number of items to
     # fetch rather than an end index — the other variants of this method in
     # this file pass PROJECTS_PAGE_SIZE here directly; confirm against the dao.
     end_idx = (PROJECTS_PAGE_SIZE if total >= start_idx + PROJECTS_PAGE_SIZE else total - start_idx)
     available_projects = dao.get_projects_for_user(user_id, start_idx, end_idx)
     # Ceiling division: one extra page for any remainder.
     pages_no = total // PROJECTS_PAGE_SIZE + (1 if total % PROJECTS_PAGE_SIZE else 0)
     for prj in available_projects:
         # NOTE(review): unpacks 4 counters while other call sites in this file
         # unpack 5 (finished/started/error/canceled/pending) — verify which
         # dao.get_operation_numbers version this snippet targets.
         fns, sta, err, canceled = dao.get_operation_numbers(prj.id)
         prj.operations_finished = fns
         prj.operations_started = sta
         prj.operations_error = err
         prj.operations_canceled = canceled
     self.logger.debug("Displaying " + str(len(available_projects)) + " projects in UI for user " + str(user_id))
     return available_projects, pages_no
    def _check_burst_removed(self):
        """
        Assert that a burst was fully removed: the burst entity itself, its
        operations, all resulting datatypes and any workflow steps.
        """
        self.assertEqual(0, len(dao.get_bursts_for_project(self.test_project.id)), "Burst was not deleted")
        self.assertEqual(0, dao.get_operation_numbers(self.test_project.id)[0], "Operations were not deleted.")
        self.assertEqual(0, len(dao.get_datatypes_in_project(self.test_project.id)))

        self.assertEqual(0, self.count_all_entities(model.WorkflowStep), "Workflow steps were not deleted.")
        self.assertEqual(0, self.count_all_entities(Datatype1),
                         "Specific datatype entries for DataType1 were not deleted.")
        self.assertEqual(0, self.count_all_entities(Datatype2),
                         "Specific datatype entries for DataType2 were not deleted.")
# Example n. 8
 def retrieve_projects_for_user(self, user_id, current_page=1):
     """
     Fetch one page of the Projects visible to the given user, decorated with
     per-status operation counters and disk usage, plus the total page count.
     """
     first_index = PROJECTS_PAGE_SIZE * (current_page - 1)
     total = dao.get_projects_for_user(user_id, is_count=True)
     available_projects = dao.get_projects_for_user(user_id, first_index, PROJECTS_PAGE_SIZE)
     full_pages, remainder = divmod(total, PROJECTS_PAGE_SIZE)
     pages_no = full_pages + (1 if remainder else 0)
     for project in available_projects:
         # Attach display-only attributes expected by the UI.
         (project.operations_finished, project.operations_started,
          project.operations_error, project.operations_canceled,
          project.operations_pending) = dao.get_operation_numbers(project.id)
         project.disk_size = dao.get_project_disk_size(project.id)
         project.disk_size_human = format_bytes_human(project.disk_size)
     self.logger.debug("Displaying " + str(len(available_projects)) + " projects in UI for user " + str(user_id))
     return available_projects, pages_no
# Example n. 9
 def retrieve_projects_for_user(self, user_id, current_page=1):
     """
     Return the requested page of Projects visible to ``user_id`` and the
     number of available pages; each project carries operation counters and
     disk-size information for display.
     """
     offset = PROJECTS_PAGE_SIZE * (current_page - 1)
     total = dao.get_projects_for_user(user_id, is_count=True)
     available_projects = dao.get_projects_for_user(user_id, offset, PROJECTS_PAGE_SIZE)
     extra_page = 1 if total % PROJECTS_PAGE_SIZE else 0
     pages_no = total // PROJECTS_PAGE_SIZE + extra_page
     for prj in available_projects:
         (prj.operations_finished, prj.operations_started, prj.operations_error,
          prj.operations_canceled, prj.operations_pending) = dao.get_operation_numbers(prj.id)
         prj.disk_size = dao.get_project_disk_size(prj.id)
         prj.disk_size_human = format_bytes_human(prj.disk_size)
     self.logger.debug("Displaying " + str(len(available_projects)) + " projects in UI for user " + str(user_id))
     return available_projects, pages_no
# Example n. 10
 def test_sync_burst_launch(self):
     """
     A full test for launching a burst.

     First create the workflow steps and launch the burst. Then check that the
     only operation created is for the first adapter from the portlet (the
     second is viewed as a visualizer). After that load the burst and check
     that the visualizer and analyzer are loaded in the corresponding tab and
     that all the parameters are still the same. Finally check that burst
     status updates corresponding to final operation status.
     """
     loaded_burst, workflow_step_list = self._prepare_and_launch_sync_burst()
     finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
     self.assertEqual(finished, 1, "One operations should have been generated for this burst.")
     self.assertEqual(started, 0, "No operations should remain started since workflow was launched synchronous.")
     self.assertEqual(error, 0, "No operations should return error status.")
     self.assertTrue(loaded_burst.tabs[0].portlets[0] is not None, "Portlet not loaded from config!")
     portlet_config = loaded_burst.tabs[0].portlets[0]
     analyzers = portlet_config.analyzers
     self.assertEqual(len(analyzers), 0, "Only have 'simulator' and a visualizer. No analyzers should be loaded.")
     visualizer = portlet_config.visualizer
     self.assertTrue(visualizer is not None, "Visualizer should not be none.")
     self.assertEqual(visualizer.fk_algorithm, workflow_step_list[0].fk_algorithm,
                      "Different ids after burst load for visualizer.")
     self.assertEqual(visualizer.static_param, workflow_step_list[0].static_param,
                      "Different static params after burst load for visualizer.")
     # Fixed copy-pasted failure message: this assertion compares the dynamic
     # (not static) parameters.
     self.assertEqual(visualizer.dynamic_param, workflow_step_list[0].dynamic_param,
                      "Different dynamic params after burst load for visualizer.")
# Example n. 11
    def test_workflow_generation(self):
        """
        Build and run a two-step workflow without dynamic parameters:
                  step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        Step one returns nothing and step two returns a single
        tvb.datatypes.datatype1.Datatype1 instance. Success is verified
        through the created operations and the stored dataType.
        """
        step_one = TestFactory.create_workflow_step(
            "tvb.tests.framework.adapters.testadapter2", "TestAdapter2",
            step_index=1, static_kwargs={"test2": 2})
        step_two = TestFactory.create_workflow_step(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapter1",
            step_index=2, static_kwargs={"test1_val1": 1, "test1_val2": 1})
        self.__create_complex_workflow([step_one, step_two])

        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 2, "DataType from second step was not stored."
        assert stored_datatypes[0].type == 'Datatype1', "Wrong type was stored."
        assert stored_datatypes[1].type == 'Datatype1', "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didnt start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didnt finish."
        assert error == 0, "Some operations finished with error status."
# Example n. 12
    def _check_burst_removed(self):
        """
        Verify complete removal of a burst: the burst record, its operations,
        all project datatypes, workflow steps and the concrete datatype rows.
        """
        remaining_bursts = dao.get_bursts_for_project(self.test_project.id)
        self.assertEqual(0, len(remaining_bursts), "Burst was not deleted")
        finished_ops = dao.get_operation_numbers(self.test_project.id)[0]
        self.assertEqual(0, finished_ops, "Operations were not deleted.")
        self.assertEqual(0, len(dao.get_datatypes_in_project(self.test_project.id)))

        checks = ((model.WorkflowStep, "Workflow steps were not deleted."),
                  (Datatype1, "Specific datatype entries for DataType1 were not deleted."),
                  (Datatype2, "Specific datatype entries for DataType2 were not deleted."))
        for entity_type, message in checks:
            self.assertEqual(0, self.count_all_entities(entity_type), message)
# Example n. 13
    def store_project(self, current_user, is_create, selected_id, **data):
        """
        Create a new Project entity (when ``is_create``) or update the one
        identified by ``selected_id``, then persist its share settings.

        :param current_user: user performing the edit; owner of a new project
        :param is_create: True for creation, False for update
        :param selected_id: id of the edited project (uniqueness check / update)
        :param data: form values; expects "name", "description" and "users",
            optionally "visited_pages"
        :returns: the stored Project, re-read so lazy attributes are loaded
        :raises ProjectServiceException: invalid name, running operations,
            encryption-related rename refusal, or DB retrieval failure
        :raises formencode.Invalid: when the chosen name is already taken
        """
        # Validate Unique Name
        new_name = data["name"]
        if len(new_name) < 1:
            raise ProjectServiceException("Invalid project name!")
        projects_no = dao.count_projects_for_name(new_name, selected_id)
        if projects_no > 0:
            err = {'name': 'Please choose another name, this one is used!'}
            raise formencode.Invalid("Duplicate Name Error", {}, None, error_dict=err)
        # Element 1 of the counters tuple is the started-operations count
        # (other call sites in this file unpack finished/started/error/...).
        started_operations = dao.get_operation_numbers(selected_id)[1]
        if started_operations > 0:
            raise ProjectServiceException("A project can not be renamed while operations are still running!")
        if is_create:
            current_proj = Project(new_name, current_user.id, data["description"])
            self.structure_helper.get_project_folder(current_proj)
        else:
            try:
                current_proj = dao.get_project_by_id(selected_id)
            except Exception as excep:
                self.logger.exception("An error has occurred!")
                raise ProjectServiceException(str(excep))
            if current_proj.name != new_name:
                project_folder = self.structure_helper.get_project_folder(current_proj)
                # NOTE(review): this raises when the folder is *not* in usage,
                # yet the message describes the opposite situation — confirm
                # the semantics of encryption_handler.is_in_usage.
                if encryption_handler.encryption_enabled() and not encryption_handler.is_in_usage(project_folder):
                    raise ProjectServiceException(
                        "A project can not be renamed while sync encryption operations are running")
                self.structure_helper.rename_project_structure(current_proj.name, new_name)
                # Keep the encrypted mirror folder (if present) in step with the rename.
                encrypted_path = encryption_handler.compute_encrypted_folder_path(project_folder)
                if os.path.exists(encrypted_path):
                    new_encrypted_path = encryption_handler.compute_encrypted_folder_path(
                        self.structure_helper.get_project_folder(new_name))
                    os.rename(encrypted_path, new_encrypted_path)
            current_proj.name = new_name
            current_proj.description = data["description"]
        # Commit to make sure we have a valid ID
        current_proj.refresh_update_date()
        self.structure_helper.write_project_metadata(current_proj)
        current_proj = dao.store_entity(current_proj)

        # Retrieve, to initialize lazy attributes
        current_proj = dao.get_project_by_id(current_proj.id)
        # Update share settings on current Project entity
        visited_pages = []
        prj_admin = current_proj.administrator.username
        if 'visited_pages' in data and data['visited_pages']:
            visited_pages = data['visited_pages'].split(',')
        # Drop membership for every user page the editor visited (admin is
        # excluded from the lookup); form-selected members are re-added below.
        for page in visited_pages:
            members = UserService.retrieve_users_except([prj_admin], int(page), MEMBERS_PAGE_SIZE)[0]
            members = [m.id for m in members]
            dao.delete_members_for_project(current_proj.id, members)

        selected_user_ids = data["users"]
        if is_create and current_user.id not in selected_user_ids:
            # Make the project admin also member of the current project
            selected_user_ids.append(current_user.id)
        dao.add_members_to_project(current_proj.id, selected_user_ids)
        # Finish operation
        self.logger.debug("Edit/Save OK for project:" + str(current_proj.id) + ' by user:' + current_user.username)
        return current_proj
# Example n. 14
 def get_operation_numbers(proj_id):
     """
     Return the operation counters for the given project, delegating to
     ``dao.get_operation_numbers``.  Call sites elsewhere in this file unpack
     the result as a tuple of per-status counts (e.g. finished, started,
     error, canceled, pending) — confirm the exact arity against the DAO.
     """
     return dao.get_operation_numbers(proj_id)
# Example n. 15
 def get_operation_numbers(proj_id):
     """
     Thin wrapper over ``dao.get_operation_numbers`` for the given project id.
     The DAO result is unpacked by other code in this file as a tuple of
     per-status operation counts — verify arity against the DAO version used.
     """
     return dao.get_operation_numbers(proj_id)