Example 1
    def create_interlinked_projects(self, region_mapping_factory, time_series_region_index_factory):
        """
        Extend the two projects created in setup.
        Project src will have 3 datatypes, one of them a connectivity, and a link to the time series from the dest project.
        Project dest will have 3 links to the datatypes in src and a time series derived from the linked connectivity.
        """
        # add a connectivity to src project and link it to dest project
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip')
        TestFactory.import_zip_connectivity(self.dst_user, self.dest_project, zip_path, "John")
        conn = TestFactory.get_entity(self.dest_project, ConnectivityIndex)
        self.flow_service.create_link([conn.id], self.dest_project.id)
        # in dest derive a time series from the linked conn

        path = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip')
        surface = TestFactory.import_surface_zip(self.dst_user, self.dest_project, path, CORTICAL)

        TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt')
        region_mapping = TestFactory.import_region_mapping(self.dst_user, self.dest_project, TXT_FILE,
                                                                surface.gid, conn.gid)

        ts = time_series_region_index_factory(
            connectivity=h5.load_from_index(conn),
            region_mapping=h5.load_from_index(region_mapping))
        # then link the time series in the src project
        self.flow_service.create_link([ts.id], self.src_project.id)

        assert 3 == len(dao.get_datatypes_in_project(self.src_project.id))
        assert 1 == len(dao.get_linked_datatypes_in_project(self.src_project.id))
        assert 1 == len(dao.get_datatypes_in_project(self.dest_project.id))
        assert 3 == len(dao.get_linked_datatypes_in_project(self.dest_project.id))
Example 2
        def build():
            """
            Project src will have 3 datatypes, and a link to the VW from the dest project.
            Project dest will have the derived VW and links
            """
            # add a connectivity to src project and link it to dest project
            zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                    'connectivity', 'connectivity_96.zip')
            conn = TestFactory.import_zip_connectivity(self.dst_user,
                                                       self.src_project,
                                                       zip_path, "John")
            self.flow_service.create_link([conn.id], self.dest_project.id)

            # in dest derive a ValueWrapper from the linked conn
            vw_gid = TestFactory.create_value_wrapper(self.dst_user,
                                                      self.dest_project)[1]
            vw = dao.get_datatype_by_gid(vw_gid)
            # then link the value wrapper in the src project
            self.flow_service.create_link([vw.id], self.src_project.id)

            assert 3 == len(dao.get_datatypes_in_project(self.src_project.id))
            assert 1 == len(
                dao.get_linked_datatypes_in_project(self.src_project.id))
            assert 1 == len(dao.get_datatypes_in_project(self.dest_project.id))
            assert 3 == len(
                dao.get_linked_datatypes_in_project(self.dest_project.id))
Example 3
    def test_linked_datatype_dependencies_restored_on_import_inverse_order(self):
        self._create_interlinked_projects()
        # export both then remove them
        export_file_src = self._export_and_remove(self.src_project)
        self.assertEqual(4, len(dao.get_datatypes_in_project(self.dest_project.id)))
        self.assertEqual(0, len(dao.get_linked_datatypes_in_project(self.dest_project.id)))
        export_file_dest = self._export_and_remove(self.dest_project)

        # importing dest before src should work
        imported_id_2 = self._import(export_file_dest, self.dest_usr_id)
        self.assertEqual(4, len(dao.get_datatypes_in_project(imported_id_2)))
        self.assertEqual(0, len(dao.get_linked_datatypes_in_project(imported_id_2)))

        imported_id_1 = self._import(export_file_src, self.src_usr_id)
        self.assertEqual(0, len(dao.get_datatypes_in_project(imported_id_1)))
        self.assertEqual(4, len(dao.get_linked_datatypes_in_project(imported_id_1)))
    def test_workflow_dynamic_params(self):
        """
        A simple test just for the fact that dynamic parameters are passed properly
        between two workflow steps: 
                  step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
                  step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance. 
        The second adapter has this passed as a dynamic workflow parameter.
        We check that the steps were actually run by checking that two operations
        are created and that two dataTypes are stored.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=1,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter3",
                                                               "TestAdapter3", step_index=2,
                                                               dynamic_kwargs={
                                                                   "test": {wf_cfg.DATATYPE_INDEX_KEY: 0,
                                                                            wf_cfg.STEP_INDEX_KEY: 1}})]

        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 3, "DataTypes from all steps were not stored."
        for result_row in stored_datatypes:
            assert result_row.type in ['Datatype1', 'Datatype2'], "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."
    def test_workflow_generation(self):
        """
        A simple test just for the fact that a workflow is created and run,
        no dynamic parameters are passed. In this case we create a two steps
        workflow: step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        were actually run by checking that two operations are created and that
        one dataType is stored.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2",
                                                               "TestAdapter2", step_index=1,
                                                               static_kwargs={"test2": 2}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=2,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 2, "DataType from second step was not stored."
        assert stored_datatypes[0].type == 'Datatype1', "Wrong type was stored."
        assert stored_datatypes[1].type == 'Datatype1', "Wrong type was stored."

        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."
Example 6
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_datatypes = dao.get_datatypes_in_project(project_id)

            # Delete datatypes one by one in the reversed order of their creation date
            project_datatypes.sort(key=lambda dt: dt.create_date, reverse=True)
            links = []
            for one_data in project_datatypes:
                new_links = self.remove_datatype(project_id, one_data.gid, True, links)
                if new_links is not None:
                    # Keep track of links so we don't create the same link more than once
                    links.extend(new_links)

            self.storage_interface.remove_project(project2delete)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))
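A minimal usage sketch for the method above. The ProjectService class name and the project_id variable are assumptions (the owning class is not shown in this example); only remove_project and ProjectServiceException appear above:

# Hypothetical caller: delete one project and report a service-level failure
# instead of letting it propagate. ProjectService and project_id are assumed names.
service = ProjectService()
try:
    service.remove_project(project_id)
except ProjectServiceException as excep:
    print("Could not remove project %s: %s" % (project_id, excep))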
Example 7
    def test_linked_datatype_dependencies_restored_on_import_inverse_order(self, transactional_setup_fixture, create_interlinked_projects):
        create_interlinked_projects()
        # export both then remove them
        export_file_src = self._export_and_remove(self.src_project)
        assert 4 == len(dao.get_datatypes_in_project(self.dest_project.id))
        assert 0 == len(dao.get_linked_datatypes_in_project(self.dest_project.id))
        export_file_dest = self._export_and_remove(self.dest_project)

        # importing dest before src should work
        imported_id_2 = self._import(export_file_dest, self.dest_usr_id)
        assert 4 == len(dao.get_datatypes_in_project(imported_id_2))
        assert 0 == len(dao.get_linked_datatypes_in_project(imported_id_2))

        imported_id_1 = self._import(export_file_src, self.src_usr_id)
        assert 0 == len(dao.get_datatypes_in_project(imported_id_1))
        assert 4 == len(dao.get_linked_datatypes_in_project(imported_id_1))
    def test_get_project_structure(self, datatype_group_factory,
                                   dummy_datatype_index_factory,
                                   project_factory, user_factory):
        """
        Tests project structure is as expected and contains all datatypes
        """
        SELF_DTS_NUMBER = 3

        user = user_factory()
        project = project_factory(user)
        dt_group = datatype_group_factory(project=project)

        link_ids, expected_links = [], []
        # Prepare link towards a simple DT
        dt_to_link = dummy_datatype_index_factory(state="RAW_DATA")
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        link_gr = dt_group
        dts = dao.get_datatype_in_group(datatype_group_id=link_gr.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(link_gr.id)
        expected_links.append(link_gr.gid)

        # Prepare link towards a single DT inside a group, and expecting to find the DT in the final tree
        link_gr = dt_group
        dt_to_link = dao.get_datatype_in_group(datatype_group_id=link_gr.id)[0]
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Actually create the links from Prj2 into Prj1
        FlowService().create_link(link_ids, project.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(project.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(
            project, None, DataTypeMetaData.KEY_STATE,
            DataTypeMetaData.KEY_SUBJECT, None)

        assert len(expected_links) + SELF_DTS_NUMBER + 2 == len(
            dts_in_tree), "invalid number of nodes in tree"
        assert not link_gr.gid in dts_in_tree, "DT_group where a single DT is linked is not expected."
        assert dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!"
        assert dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!"

        project_dts = dao.get_datatypes_in_project(project.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                assert not dt.gid in node_json, "DTs part of a group should not be"
                assert not dt.gid in dts_in_tree, "DTs part of a group should not be"
            else:
                assert dt.gid in node_json, "Simple DTs and DT_Groups should be"
                assert dt.gid in dts_in_tree, "Simple DTs and DT_Groups should be"

        for link_gid in expected_links:
            assert link_gid in node_json, "Expected Link not present"
            assert link_gid in dts_in_tree, "Expected Link not present"
Example 9
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))
Example 10
    def test_linked_datatype_dependencies_restored_on_import(self):
        self._create_interlinked_projects()
        # export both then remove them
        export_file_src = self._export_and_remove(self.src_project)
        assert 4 == len(dao.get_datatypes_in_project(self.dest_project.id))
        assert 0 == len(dao.get_linked_datatypes_in_project(self.dest_project.id))
        export_file_dest = self._export_and_remove(self.dest_project)

        # importing both projects should work
        imported_id_1 = self._import(export_file_src, self.src_usr_id)
        assert 4 == len(dao.get_datatypes_in_project(imported_id_1))
        assert 0 == len(dao.get_linked_datatypes_in_project(imported_id_1))

        imported_id_2 = self._import(export_file_dest, self.dest_usr_id)
        assert 0 == len(dao.get_datatypes_in_project(imported_id_2))
        assert 4 == len(dao.get_linked_datatypes_in_project(imported_id_2))
Example 11
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_datatypes = dao.get_datatypes_in_project(project_id)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(model.Links, one_link.id)

            self.structure_helper.remove_project_structure(project2delete.name)
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
    def test_get_project_structure(self, datatype_group_factory,
                                   dummy_datatype_index_factory,
                                   project_factory, user_factory):
        """
        Tests project structure is as expected and contains all datatypes and created links
        """
        user = user_factory()
        project1 = project_factory(user, name="TestPS1")
        project2 = project_factory(user, name="TestPS2")

        dt_group = datatype_group_factory(project=project1)
        dt_simple = dummy_datatype_index_factory(state="RAW_DATA",
                                                 project=project1)
        # Create 3 DTs directly in Project 2
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)
        dummy_datatype_index_factory(state="RAW_DATA", project=project2)

        # Create Links from Project 1 into Project 2
        link_ids, expected_links = [], []
        link_ids.append(dt_simple.id)
        expected_links.append(dt_simple.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        dts = dao.get_datatype_in_group(datatype_group_id=dt_group.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(dt_group.id)
        expected_links.append(dt_group.gid)

        # Actually create the links from Prj1 into Prj2
        AlgorithmService().create_link(link_ids, project2.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(project2.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(
            project2, None, DataTypeMetaData.KEY_STATE,
            DataTypeMetaData.KEY_SUBJECT, None)

        assert len(expected_links) + 3 == len(
            dts_in_tree), "invalid number of nodes in tree"
        assert dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!"
        assert dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!"

        project_dts = dao.get_datatypes_in_project(project2.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                assert not dt.gid in node_json, "DTs part of a group should not be"
                assert not dt.gid in dts_in_tree, "DTs part of a group should not be"
            else:
                assert dt.gid in node_json, "Simple DTs and DT_Groups should be"
                assert dt.gid in dts_in_tree, "Simple DTs and DT_Groups should be"

        for link_gid in expected_links:
            assert link_gid in node_json, "Expected Link not present"
            assert link_gid in dts_in_tree, "Expected Link not present"
Example 13
    def _create_interlinked_projects(self):
        """
        Extend the two projects created in setup.
        Project src will have 3 datatypes, one of them a connectivity, and a link to the time series from the dest project.
        Project dest will have 3 links to the datatypes in src and a time series derived from the linked connectivity.
        """
        # add a connectivity to src project and link it to dest project
        _, conn = self.datatype_factory_src.create_connectivity()
        self.flow_service.create_link([conn.id], self.dest_project.id)
        # in dest derive a time series from the linked conn
        ts = self.datatype_factory_dest.create_timeseries(conn)
        # then link the time series in the src project
        self.flow_service.create_link([ts.id], self.src_project.id)

        self.assertEqual(3, len(dao.get_datatypes_in_project(self.src_project.id)))
        self.assertEqual(1, len(dao.get_linked_datatypes_in_project(self.src_project.id)))
        self.assertEqual(1, len(dao.get_datatypes_in_project(self.dest_project.id)))
        self.assertEqual(3, len(dao.get_linked_datatypes_in_project(self.dest_project.id)))
    def test_get_project_structure(self):
        """
        Tests project structure is as expected and contains all datatypes
        """
        SELF_DTS_NUMBER = 3
        dt_factory_1 = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory_1, SELF_DTS_NUMBER)
        dt_group = dt_factory_1.create_datatype_group()

        link_ids, expected_links = [], []
        # Prepare link towards a simple DT
        dt_factory_2 = datatypes_factory.DatatypesFactory()
        dt_to_link = dt_factory_2.create_simple_datatype()
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dts = dao.get_datatype_in_group(datatype_group_id=link_gr.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(link_gr.id)
        expected_links.append(link_gr.gid)

        # Prepare link towards a single DT inside a group, and expecting to find the DT in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dt_to_link = dao.get_datatype_in_group(datatype_group_id=link_gr.id)[0]
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Actually create the links from Prj2 into Prj1
        FlowService().create_link(link_ids, dt_factory_1.project.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(dt_factory_1.project.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(dt_factory_1.project, None, DataTypeMetaData.KEY_STATE,
                                                               DataTypeMetaData.KEY_SUBJECT, None)

        self.assertEqual(len(expected_links) + SELF_DTS_NUMBER + 2, len(dts_in_tree), "invalid number of nodes in tree")
        self.assertFalse(link_gr.gid in dts_in_tree, "DT_group where a single DT is linked is not expected.")
        self.assertTrue(dt_group.gid in dts_in_tree, "DT_Group should be in the Project Tree!")
        self.assertTrue(dt_group.gid in node_json, "DT_Group should be in the Project Tree JSON!")

        project_dts = dao.get_datatypes_in_project(dt_factory_1.project.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                self.assertFalse(dt.gid in node_json, "DTs part of a group should not be")
                self.assertFalse(dt.gid in dts_in_tree, "DTs part of a group should not be")
            else:
                self.assertTrue(dt.gid in node_json, "Simple DTs and DT_Groups should be")
                self.assertTrue(dt.gid in dts_in_tree, "Simple DTs and DT_Groups should be")

        for link_gid in expected_links:
            self.assertTrue(link_gid in node_json, "Expected Link not present")
            self.assertTrue(link_gid in dts_in_tree, "Expected Link not present")
    def _gather_project_datatypes(project, only_visible):

        project_datatypes = []

        dts = dao.get_datatypes_in_project(project.id, only_visible=only_visible)
        for dt in dts:
            project_datatypes.append({KEY_DT_GID: dt.gid,
                                      KEY_BURST_ID: dt.fk_parent_burst,
                                      KEY_OPERATION_ID: dt.fk_from_operation,
                                      KEY_DT_DATE: dt.create_date})
        return project_datatypes
Example 16
    def _gather_project_datatypes(project, only_visible):

        project_datatypes = []

        dts = dao.get_datatypes_in_project(project.id, only_visible=only_visible)
        for dt in dts:
            project_datatypes.append({KEY_DT_GID: dt.gid,
                                      KEY_BURST_ID: dt.fk_parent_burst,
                                      KEY_OPERATION_ID: dt.fk_from_operation,
                                      KEY_DT_DATE: dt.create_date})
        return project_datatypes
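A small consumption sketch for the helper above. The project entity and the printed fields are illustrative; the KEY_* constants are whatever the surrounding module defines:

# Hypothetical usage: summarise the visible DataTypes of an already-loaded project entity.
for dt_info in _gather_project_datatypes(project, only_visible=True):
    print("%s from operation %s, created %s" % (dt_info[KEY_DT_GID],
                                                dt_info[KEY_OPERATION_ID],
                                                dt_info[KEY_DT_DATE]))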
Example 17
    def test_import_datatype_with_links(self, region_mapping_index_factory, user_factory, project_factory):
        """
        This is a test for importing region mapping with links, that results in importing:
        connectivity, surface and region mapping all from one zip.
        """
        self.test_user = user_factory()
        self.test_project = project_factory(self.test_user)

        region_mapping_index = region_mapping_index_factory()

        export_manager = ExportManager()
        _, exported_h5_file, _ = export_manager.export_data(
            region_mapping_index, self.TVB_LINKED_EXPORTER, self.test_project)

        # Clean DB
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data
        self.test_user = user_factory()
        self.test_project = project_factory(self.test_user)

        datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert 0 == len(datatypes), "There should be no DTs in the DB before import."

        self._import(exported_h5_file)

        datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert 3 == len(datatypes), "Project should contain 3 data types."

        has_conn = False
        has_surface = False
        for dt in datatypes:
            if dt.gid == region_mapping_index.fk_connectivity_gid:
                has_conn = True
            if dt.gid == region_mapping_index.fk_surface_gid:
                has_surface = True

        assert has_conn is True, "Connectivity was imported as linked"
        assert has_surface is True, "Surface was imported as linked"
    def test_workflow_dynamic_params(self):
        """
        A simple test just for the fact that dynamic parameters are passed properly
        between two workflow steps: 
                  step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
                  step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance. 
        The second adapter has this passed as a dynamic workflow parameter.
        We check that the steps were actually run by checking that two operations
        are created and that two dataTypes are stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter1",
                "TestAdapter1",
                step_index=1,
                static_kwargs={
                    "test1_val1": 1,
                    "test1_val2": 1
                }),
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter3",
                "TestAdapter3",
                step_index=2,
                dynamic_kwargs={
                    "test": {
                        wf_cfg.DATATYPE_INDEX_KEY: 0,
                        wf_cfg.STEP_INDEX_KEY: 1
                    }
                })
        ]

        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        self.assertTrue(
            len(stored_datatypes) == 3,
            "DataType from all step were not stored.")
        for result_row in stored_datatypes:
            self.assertTrue(result_row.type in ['Datatype1', 'Datatype2'],
                            "Wrong type was stored.")

        finished, started, error, _, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 3,
            "Didn't start operations for both adapters in workflow.")
        self.assertEqual(started, 0,
                         "Some operations from workflow didn't finish.")
        self.assertEqual(error, 0,
                         "Some operations finished with error status.")
Example 19
    def _check_burst_removed(self):
        """
        Test that a burst was properly removed. This means checking that the burst entity,
        any workflow steps and any datatypes resulted from the burst are also removed.
        """
        remaining_bursts = dao.get_bursts_for_project(self.test_project.id)
        assert 0 == len(remaining_bursts), "Burst was not deleted"
        ops_number = dao.get_operation_numbers(self.test_project.id)[0]
        assert 0 == ops_number, "Operations were not deleted."
        datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert 0 == len(datatypes)

        datatype1_stored = self.count_all_entities(Datatype1)
        datatype2_stored = self.count_all_entities(Datatype2)
        assert 0 == datatype1_stored, "Specific datatype entries for DataType1 were not deleted."
        assert 0 == datatype2_stored, "Specific datatype entries for DataType2 were not deleted."
    def test_workflow_generation(self):
        """
        A simple test just for the fact that a workflow is created and run,
        no dynamic parameters are passed. In this case we create a two steps
        workflow: step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        were actually run by checking that two operations are created and that
        one dataType is stored.
        """
        workflow_step_list = [
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter2",
                "TestAdapter2",
                step_index=1,
                static_kwargs={"test2": 2}),
            TestFactory.create_workflow_step(
                "tvb.tests.framework.adapters.testadapter1",
                "TestAdapter1",
                step_index=2,
                static_kwargs={
                    "test1_val1": 1,
                    "test1_val2": 1
                })
        ]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        self.assertTrue(
            len(stored_datatypes) == 2,
            "DataType from second step was not stored.")
        self.assertTrue(stored_datatypes[0].type == 'Datatype1',
                        "Wrong type was stored.")
        self.assertTrue(stored_datatypes[1].type == 'Datatype1',
                        "Wrong type was stored.")

        finished, started, error, _, _ = dao.get_operation_numbers(
            self.test_project.id)
        self.assertEqual(
            finished, 3,
            "Didnt start operations for both adapters in workflow.")
        self.assertEqual(started, 0,
                         "Some operations from workflow didnt finish.")
        self.assertEqual(error, 0,
                         "Some operations finished with error status.")
    def _check_burst_removed(self):
        """
        Test that a burst was properly removed. This means checking that the burst entity,
        any workflow steps and any datatypes resulted from the burst are also removed.
        """
        remaining_bursts = dao.get_bursts_for_project(self.test_project.id)
        self.assertEqual(0, len(remaining_bursts), "Burst was not deleted")
        ops_number = dao.get_operation_numbers(self.test_project.id)[0]
        self.assertEqual(0, ops_number, "Operations were not deleted.")
        datatypes = dao.get_datatypes_in_project(self.test_project.id)
        self.assertEqual(0, len(datatypes))

        wf_steps = self.count_all_entities(model.WorkflowStep)
        datatype1_stored = self.count_all_entities(Datatype1)
        datatype2_stored = self.count_all_entities(Datatype2)
        self.assertEqual(0, wf_steps, "Workflow steps were not deleted.")
        self.assertEqual(0, datatype1_stored, "Specific datatype entries for DataType1 were not deleted.")
        self.assertEqual(0, datatype2_stored, "Specific datatype entries for DataType2 were not deleted.")
Example 22
    def remove_project(self, project_id):
        """
        Remove Project from DB and File Storage.
        """
        try:
            project2delete = dao.get_project_by_id(project_id)

            self.logger.debug("Deleting project: id=" + str(project_id) + ' name=' + project2delete.name)
            project_datatypes = dao.get_datatypes_in_project(project_id)
            project_datatypes.sort(key=lambda dt: dt.create_date, reverse=True)
            for one_data in project_datatypes:
                self.remove_datatype(project_id, one_data.gid, True)

            links = dao.get_links_for_project(project_id)
            for one_link in links:
                dao.remove_entity(Links, one_link.id)
            project_bursts = dao.get_bursts_for_project(project_id)
            for burst in project_bursts:
                dao.remove_entity(burst.__class__, burst.id)

            project_folder = self.structure_helper.get_project_folder(project2delete)
            self.structure_helper.remove_project_structure(project2delete.name)
            encrypted_path = encryption_handler.compute_encrypted_folder_path(project_folder)
            if os.path.exists(encrypted_path):
                self.structure_helper.remove_folder(encrypted_path)
            if os.path.exists(encryption_handler.project_key_path(project_id)):
                os.remove(encryption_handler.project_key_path(project_id))
            dao.delete_project(project_id)
            self.logger.debug("Deleted project: id=" + str(project_id) + ' name=' + project2delete.name)

        except RemoveDataTypeException as excep:
            self.logger.exception("Could not execute operation Node Remove!")
            raise ProjectServiceException(str(excep))
        except FileStructureException as excep:
            self.logger.exception("Could not delete because of rights!")
            raise ProjectServiceException(str(excep))
        except Exception as excep:
            self.logger.exception(str(excep))
            raise ProjectServiceException(str(excep))
Example 23
def list_datatypes(project_id):
    fmt = "%24s %16s %5s %32s %12s"
    print(fmt % ('type', 'tag', 'id', 'gid', 'date'))
    for dt in dao.get_datatypes_in_project(project_id):
        print(fmt % (dt.type, dt.user_tag_1, dt.id, dt.gid, dt.create_date))
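A one-line usage sketch (the project id is illustrative and assumes the TVB profile and database session are already initialised):

list_datatypes(1)  # prints type, tag, id, gid and creation date for every DataType in project 1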
Example 24
def list_datatypes(project_id):
    fmt = "%24s %16s %5s %32s %12s"
    print(fmt % ('type', 'tag', 'id', 'gid', 'date'))
    for dt in dao.get_datatypes_in_project(project_id):
        print(fmt % (dt.type, dt.user_tag_1, dt.id, dt.gid, dt.create_date))
    def test_get_project_structure(self):
        """
        Tests project structure is as expected and contains all datatypes
        """
        SELF_DTS_NUMBER = 3
        dt_factory_1 = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory_1, SELF_DTS_NUMBER)
        dt_group = dt_factory_1.create_datatype_group()

        link_ids, expected_links = [], []
        # Prepare link towards a simple DT
        dt_factory_2 = datatypes_factory.DatatypesFactory()
        dt_to_link = dt_factory_2.create_simple_datatype()
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Prepare links towards a full DT Group, but expecting only the DT_Group in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dts = dao.get_datatype_in_group(datatype_group_id=link_gr.id)
        link_ids.extend([dt_to_link.id for dt_to_link in dts])
        link_ids.append(link_gr.id)
        expected_links.append(link_gr.gid)

        # Prepare link towards a single DT inside a group, and expecting to find the DT in the final tree
        link_gr = dt_factory_2.create_datatype_group()
        dt_to_link = dao.get_datatype_in_group(datatype_group_id=link_gr.id)[0]
        link_ids.append(dt_to_link.id)
        expected_links.append(dt_to_link.gid)

        # Actually create the links from Prj2 into Prj1
        FlowService().create_link(link_ids, dt_factory_1.project.id)

        # Retrieve the raw data used to compose the tree (for easy parsing)
        dts_in_tree = dao.get_data_in_project(dt_factory_1.project.id)
        dts_in_tree = [dt.gid for dt in dts_in_tree]
        # Retrieve the tree json (for trivial validations only, as we can not decode)
        node_json = self.project_service.get_project_structure(
            dt_factory_1.project, None, DataTypeMetaData.KEY_STATE,
            DataTypeMetaData.KEY_SUBJECT, None)

        self.assertEqual(
            len(expected_links) + SELF_DTS_NUMBER + 2, len(dts_in_tree),
            "invalid number of nodes in tree")
        self.assertFalse(
            link_gr.gid in dts_in_tree,
            "DT_group where a single DT is linked is not expected.")
        self.assertTrue(dt_group.gid in dts_in_tree,
                        "DT_Group should be in the Project Tree!")
        self.assertTrue(dt_group.gid in node_json,
                        "DT_Group should be in the Project Tree JSON!")

        project_dts = dao.get_datatypes_in_project(dt_factory_1.project.id)
        for dt in project_dts:
            if dt.fk_datatype_group is not None:
                self.assertFalse(dt.gid in node_json,
                                 "DTs part of a group should not be")
                self.assertFalse(dt.gid in dts_in_tree,
                                 "DTs part of a group should not be")
            else:
                self.assertTrue(dt.gid in node_json,
                                "Simple DTs and DT_Groups should be")
                self.assertTrue(dt.gid in dts_in_tree,
                                "Simple DTs and DT_Groups should be")

        for link_gid in expected_links:
            self.assertTrue(link_gid in node_json, "Expected Link not present")
            self.assertTrue(link_gid in dts_in_tree,
                            "Expected Link not present")
Example 26
    def export_project(self, project, optimize_size=False):
        """
        Given a project root and the TVB storage_path, create a ZIP
        ready for export.
        :param project: project object which identifies the project to be exported
        :param optimize_size: when True, export only the DataTypes with the visibility flag set ON
        """
        if project is None:
            raise ExportException("Please provide project to be exported")

        project_folder = self.files_helper.get_project_folder(project)
        project_datatypes = dao.get_datatypes_in_project(
            project.id, only_visible=optimize_size)
        to_be_exported_folders = []
        considered_op_ids = []
        folders_to_exclude = self._get_op_with_errors(project.id)

        if optimize_size:
            # take only the DataType with visibility flag set ON
            for dt in project_datatypes:
                op_id = dt.fk_from_operation
                if op_id not in considered_op_ids:
                    to_be_exported_folders.append({
                        'folder': self.files_helper.get_project_folder(project, str(op_id)),
                        'archive_path_prefix': str(op_id) + os.sep,
                        'exclude': folders_to_exclude
                    })
                    considered_op_ids.append(op_id)

        else:
            folders_to_exclude.append("TEMP")
            to_be_exported_folders.append({
                'folder': project_folder,
                'archive_path_prefix': '',
                'exclude': folders_to_exclude
            })

        # Compute path and name of the zip file
        now = datetime.now()
        date_str = now.strftime("%Y-%m-%d_%H-%M")
        zip_file_name = "%s_%s.%s" % (date_str, project.name,
                                      self.ZIP_FILE_EXTENSION)

        export_folder = self._build_data_export_folder(project)
        result_path = os.path.join(export_folder, zip_file_name)

        with TvbZip(result_path, "w") as zip_file:
            # Pack project [filtered] content into a ZIP file:
            self.logger.debug("Done preparing, now we will write folders " +
                              str(len(to_be_exported_folders)))
            self.logger.debug(str(to_be_exported_folders))
            for pack in to_be_exported_folders:
                zip_file.write_folder(**pack)
            self.logger.debug(
                "Done exporting files, now we will export linked DTs")
            self._export_linked_datatypes(project, zip_file)
            # Make sure the Project.xml file gets copied:
            if optimize_size:
                self.logger.debug("Done linked, now we write the project xml")
                zip_file.write(
                    self.files_helper.get_project_meta_file_path(project.name),
                    self.files_helper.TVB_PROJECT_FILE)
            self.logger.debug("Done, closing")

        return result_path
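A hedged usage sketch for the exporter method above. The ExportManager owner class is an assumption borrowed from Example 17; only export_project and its optimize_size flag appear in this example:

# Hypothetical caller: build a ZIP containing only the visible DataTypes of a project.
exporter = ExportManager()  # assumed owner class of export_project
zip_path = exporter.export_project(project, optimize_size=True)
print("Project archive written to: " + zip_path)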