def test_remove_project_node(self):
     """
     Test removing a node from a project.
     """
     inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user) 
     project_to_link = model.Project("Link", self.test_user.id, "descript")
     project_to_link = dao.store_entity(project_to_link)
     exact_data = dao.get_datatype_by_gid(gid)
     dao.store_entity(model.Links(exact_data.id, project_to_link.id))
     self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Initialization problem!")
     
     operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
     op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
     self.assertTrue(os.path.exists(op_folder))
     sub_files = os.listdir(op_folder)
     self.assertEqual(2, len(sub_files))
     ### Validate that no more files are created than needed.
     
     self.project_service._remove_project_node_files(inserted_project.id, gid)
     sub_files = os.listdir(op_folder)
     self.assertEqual(1, len(sub_files))
     ### operation.xml file should still be there
     
     op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1)) 
     sub_files = os.listdir(op_folder)
     self.assertEqual(2, len(sub_files))
     self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links")
     self.project_service._remove_project_node_files(project_to_link.id, gid)
     self.assertTrue(dao.get_datatype_by_gid(gid) is None)  
     sub_files = os.listdir(op_folder)
     self.assertEqual(1, len(sub_files))
Example no. 2
    def test_remove_project_node(self):
        """
        Test removing a node from a project.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user) 
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
        assert dao.get_datatype_by_gid(gid) is not None, "Initialization problem!"
        
        operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
        op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
        assert os.path.exists(op_folder)
        sub_files = os.listdir(op_folder)
        assert 2 == len(sub_files)
        ### Validate that no more files are created than needed.

        if dao.get_system_user() is None:
            dao.store_entity(model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True, None))
        self.project_service._remove_project_node_files(inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        assert 1 == len(sub_files)
        ### operation.xml file should still be there
        
        op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1)) 
        sub_files = os.listdir(op_folder)
        assert 2 == len(sub_files)
        assert dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links"
        self.project_service._remove_project_node_files(project_to_link.id, gid)
        assert dao.get_datatype_by_gid(gid) is None
        sub_files = os.listdir(op_folder)
        assert 1 == len(sub_files)
Example no. 3
 def test_remove_used_surface(self):
     """
     Tries to remove a used surface
     """
     mapping = self.flow_service.get_available_datatypes(
         self.test_project.id, "tvb.datatypes.surfaces.RegionMapping")
     self.assertEquals(len(mapping), 1, "There should be one Mapping.")
     mapping_gid = mapping[0][2]
     mapping = ABCAdapter.load_entity_by_gid(mapping_gid)
     #delete surface
     surfaces = self.flow_service.get_available_datatypes(
         self.test_project.id, "tvb.datatypes.surfaces.CorticalSurface")
     self.assertTrue(len(surfaces) > 0, "At least one Cortex expected")
     surface = dao.get_datatype_by_gid(mapping.surface.gid)
     self.assertEqual(surface.gid, mapping.surface.gid,
                      "The surfaces should have the same GID")
     try:
         self.project_service.remove_datatype(self.test_project.id,
                                              surface.gid)
         self.fail(
             "The surface is still used by a RegionMapping. It should not be possible to remove it."
         )
     except RemoveDataTypeException:
         #OK, do nothing
         pass
     res = dao.get_datatype_by_gid(surface.gid)
     self.assertEqual(surface.id, res.id, "A used surface was deleted")
Example no. 4
    def populate_surface_fields(self, time_series_index):
        """
        To be overridden for populating the one_to_one_map/connectivity/region_map/surface fields.
        """

        self.one_to_one_map = isinstance(time_series_index, TimeSeriesSurfaceIndex)

        if self.one_to_one_map:
            self.PAGE_SIZE /= 10
            surface_gid = time_series_index.fk_surface_gid
            surface_index = dao.get_datatype_by_gid(surface_gid)
            region_map_indexes = dao.get_generic_entity(RegionMappingIndex, surface_gid, 'fk_surface_gid')
            if len(region_map_indexes) < 1:
                region_map_index = None
                connectivity_index = None
            else:
                region_map_index = region_map_indexes[0]
                connectivity_index = dao.get_datatype_by_gid(region_map_index.fk_connectivity_gid)
        else:
            connectivity_index = dao.get_datatype_by_gid(time_series_index.fk_connectivity_gid)

            if time_series_index.fk_region_mapping_gid:
                region_map_index = dao.get_datatype_by_gid(time_series_index.fk_region_mapping_gid)
            else:
                region_map_indexes = dao.get_generic_entity(RegionMappingIndex, connectivity_index.gid,
                                                            'fk_connectivity_gid')
                region_map_index = region_map_indexes[0]

            surface_index = dao.get_datatype_by_gid(region_map_index.fk_surface_gid)

        self.connectivity_index = connectivity_index
        self.region_map_gid = None if region_map_index is None else region_map_index.gid
        self.surface_gid = None if surface_index is None else surface_index.gid
        self.surface_h5 = None if surface_index is None else h5.h5_file_for_index(surface_index)
Example no. 5
    def test_remove_project_node(self):
        """
        Test removing a node from a project.
        """
        inserted_project, gid, op = TestFactory.create_value_wrapper(self.test_user)
        project_to_link = model_project.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        assert exact_data is not None, "Initialization problem!"
        link = dao.store_entity(model_datatype.Links(exact_data.id, project_to_link.id))

        vw_h5_path = h5.path_for_stored_index(exact_data)
        assert os.path.exists(vw_h5_path)

        if dao.get_system_user() is None:
            dao.store_entity(model_operation.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME,
                                                  TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True,
                                                  None))

        self.project_service._remove_project_node_files(inserted_project.id, gid, [link])

        assert not os.path.exists(vw_h5_path)
        exact_data = dao.get_datatype_by_gid(gid)
        assert exact_data is not None, "Data should still be in DB, because of links"
        vw_h5_path_new = h5.path_for_stored_index(exact_data)
        assert os.path.exists(vw_h5_path_new)
        assert vw_h5_path_new != vw_h5_path

        self.project_service._remove_project_node_files(project_to_link.id, gid, [])
        assert dao.get_datatype_by_gid(gid) is None
Example no. 6
    def launch(self, view_model):
        # type: (BaseSurfaceViewerModel) -> dict

        connectivity_measure_index = self.load_entity_by_gid(
            view_model.connectivity_measure)
        cm_connectivity_gid = connectivity_measure_index.fk_connectivity_gid
        cm_connectivity_index = dao.get_datatype_by_gid(cm_connectivity_gid)

        region_map_index = None
        rm_connectivity_index = None
        if view_model.region_map:
            region_map_index = self.load_entity_by_gid(view_model.region_map)
            rm_connectivity_gid = region_map_index.fk_connectivity_gid
            rm_connectivity_index = dao.get_datatype_by_gid(
                rm_connectivity_gid)

        if not region_map_index or rm_connectivity_index.number_of_regions != cm_connectivity_index.number_of_regions:
            region_maps = dao.get_generic_entity(RegionMappingIndex,
                                                 cm_connectivity_gid,
                                                 'fk_connectivity_gid')
            if region_maps:
                region_map_index = region_maps[0]

        surface_gid = region_map_index.fk_surface_gid
        surface_viewer_model = SurfaceViewerModel(
            surface=surface_gid,
            region_map=region_map_index.gid,
            connectivity_measure=view_model.connectivity_measure,
            shell_surface=view_model.shell_surface)
        surface_viewer_model.title = self._ui_name
        return SurfaceViewer.launch(self, surface_viewer_model)
Example no. 7
    def _store_imported_datatypes_in_db(self, project, all_datatypes):
        # type: (Project, dict) -> int
        sorted_dts = sorted(
            all_datatypes.items(),
            key=lambda dt_item: dt_item[1].create_date or datetime.now())

        count = 0
        for dt_path, datatype in sorted_dts:
            datatype_already_in_tvb = dao.get_datatype_by_gid(datatype.gid)
            if not datatype_already_in_tvb:
                self.store_datatype(datatype, dt_path)
                count += 1
            else:
                AlgorithmService.create_link([datatype_already_in_tvb.id],
                                             project.id)

            file_path = h5.h5_file_for_index(datatype).path
            h5_class = H5File.h5_class_from_file(file_path)
            reference_list = h5_class(file_path).gather_references()

            for _, reference_gid in reference_list:
                if not reference_gid:
                    continue

                ref_index = dao.get_datatype_by_gid(reference_gid.hex)
                if ref_index is None:
                    os.remove(file_path)
                    dao.remove_entity(datatype.__class__, datatype.id)
                    raise MissingReferenceException(
                        'Imported file depends on datatypes that do not exist. Please upload '
                        'those first!')

        return count
Example no. 8
 def test_set_visibility_datatype(self):
     """
     Set datatype visibility to true and false and check results are updated.
     """
     datatype = DatatypesFactory().create_datatype_with_storage()
     self.assertTrue(datatype.visible)
     self.project_c.set_visibility('datatype', datatype.gid, 'False')
     datatype = dao.get_datatype_by_gid(datatype.gid)
     self.assertFalse(datatype.visible)
     self.project_c.set_visibility('datatype', datatype.gid, 'True')
     datatype = dao.get_datatype_by_gid(datatype.gid)
     self.assertTrue(datatype.visible)
Example no. 9
 def test_set_visibility_datatype(self, dummy_datatype_index_factory):
     """
     Set datatype visibility to true and false and check results are updated.
     """
     datatype = dummy_datatype_index_factory()
     assert datatype.visible
     self.project_c.set_visibility('datatype', datatype.gid, 'False')
     datatype = dao.get_datatype_by_gid(datatype.gid)
     assert not datatype.visible
     self.project_c.set_visibility('datatype', datatype.gid, 'True')
     datatype = dao.get_datatype_by_gid(datatype.gid)
     assert datatype.visible
Example no. 11
    def deserialize_simulator(simulator_gid, storage_path):
        simulator_in_path = h5.path_for(storage_path, SimulatorH5,
                                        simulator_gid)
        simulator_in = Simulator()

        with SimulatorH5(simulator_in_path) as simulator_in_h5:
            simulator_in_h5.load_into(simulator_in)
            connectivity_gid = simulator_in_h5.connectivity.load()
            stimulus_gid = simulator_in_h5.stimulus.load()
            simulation_state_gid = simulator_in_h5.simulation_state.load()

        conn_index = dao.get_datatype_by_gid(connectivity_gid.hex)
        conn = h5.load_from_index(conn_index)

        simulator_in.connectivity = conn

        if simulator_in.surface:
            cortex_path = h5.path_for(storage_path, CortexH5,
                                      simulator_in.surface.gid)
            with CortexH5(cortex_path) as cortex_h5:
                local_conn_gid = cortex_h5.local_connectivity.load()
                region_mapping_gid = cortex_h5.region_mapping_data.load()

            region_mapping_index = dao.get_datatype_by_gid(
                region_mapping_gid.hex)
            region_mapping_path = h5.path_for_stored_index(
                region_mapping_index)
            region_mapping = RegionMapping()
            with RegionMappingH5(region_mapping_path) as region_mapping_h5:
                region_mapping_h5.load_into(region_mapping)
                region_mapping.gid = region_mapping_h5.gid.load()
                surf_gid = region_mapping_h5.surface.load()

            surf_index = dao.get_datatype_by_gid(surf_gid.hex)
            surf_h5 = h5.h5_file_for_index(surf_index)
            surf = CorticalSurface()
            surf_h5.load_into(surf)
            surf_h5.close()
            region_mapping.surface = surf
            simulator_in.surface.region_mapping_data = region_mapping

            if local_conn_gid:
                local_conn_index = dao.get_datatype_by_gid(local_conn_gid.hex)
                local_conn = h5.load_from_index(local_conn_index)
                simulator_in.surface.local_connectivity = local_conn

        if stimulus_gid:
            stimulus_index = dao.get_datatype_by_gid(stimulus_gid.hex)
            stimulus = h5.load_from_index(stimulus_index)
            simulator_in.stimulus = stimulus

        return simulator_in, simulation_state_gid
Example no. 12
    def test_remove_value_wrapper(self):
        """
        Test the deletion of a value wrapper dataType
        """
        count_vals = self.count_all_entities(ValueWrapperIndex)
        assert 0 == count_vals, "There should be no value wrapper"
        value_wrapper_gid = TestFactory.create_value_wrapper(self.test_user, self.test_project)[1]
        res = dao.get_datatype_by_gid(value_wrapper_gid)
        assert res is not None, "The value wrapper was not created."

        self.project_service.remove_datatype(self.test_project.id, value_wrapper_gid)

        res = dao.get_datatype_by_gid(value_wrapper_gid)
        assert res is None, "The value wrapper was not deleted."
def update_dt(dt_id, new_create_date):
    dt = dao.get_datatype_by_id(dt_id)
    dt.create_date = new_create_date
    dao.store_entity(dt)
    # Update MetaData in H5 as well.
    dt = dao.get_datatype_by_gid(dt.gid)
    dt.persist_full_metadata()
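A minimal call sketch for the helper above; the id and timestamp below are hypothetical and assume an initialized TVB database session:

# hypothetical values, shown only to illustrate the call
from datetime import datetime
update_dt(dt_id=42, new_create_date=datetime(2016, 5, 1, 12, 0))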
Example no. 14
    def _get_launchable_algorithms(self, datatype_gid, categories):

        datatype_instance = dao.get_datatype_by_gid(datatype_gid)
        data_class = datatype_instance.__class__
        all_compatible_classes = [data_class.__name__]
        for one_class in getmro(data_class):
            if issubclass(
                    one_class, MappedType
            ) and one_class.__name__ not in all_compatible_classes:
                all_compatible_classes.append(one_class.__name__)

        self.logger.debug("Searching in categories: " + str(categories) +
                          " for classes " + str(all_compatible_classes))
        categories_ids = [categ.id for categ in categories]
        launchable_adapters = dao.get_applicable_adapters(
            all_compatible_classes, categories_ids)

        filtered_adapters = []
        for stored_adapter in launchable_adapters:
            filter_chain = FilterChain.from_json(
                stored_adapter.datatype_filter)
            if not filter_chain or filter_chain.get_python_filter_equivalent(
                    datatype_instance):
                filtered_adapters.append(stored_adapter)

        return datatype_instance, filtered_adapters
Example no. 15
    def set_datatype_visibility(datatype_gid, is_visible):
        """
        Sets the dataType visibility. If the given dataType is a dataType group or it is part of a
        dataType group, then this method will set the visibility for each dataType from this group.
        """

        def set_visibility(dt):
            """ set visibility flag, persist in db and h5"""
            dt.visible = is_visible
            dt = dao.store_entity(dt)
            dt.persist_full_metadata()

        def set_group_descendants_visibility(datatype_group_id):
            datatypes_in_group = dao.get_datatypes_from_datatype_group(datatype_group_id)
            for group_dt in datatypes_in_group:
                set_visibility(group_dt)

        datatype = dao.get_datatype_by_gid(datatype_gid)

        if isinstance(datatype, DataTypeGroup):  # datatype is a group
            set_group_descendants_visibility(datatype.id)
        elif datatype.fk_datatype_group is not None:  # datatype is member of a group
            set_group_descendants_visibility(datatype.fk_datatype_group)
            # the datatype to be updated is the parent datatype group
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

        # update the datatype or datatype group.
        set_visibility(datatype)
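A hedged usage sketch for the method above; `some_datatype_gid` is a hypothetical GID of a stored DataType:

# hide the datatype (or its whole group) from the project tree
set_datatype_visibility(some_datatype_gid, False)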
Example no. 16
    def test_remove_used_surface(self):
        """
        Tries to remove a used surface
        """
        filter = FilterChain(fields=[FilterChain.datatype + '.surface_type'],
                             operations=["=="],
                             values=[CORTICAL])
        mapping = try_get_last_datatype(self.test_project.id,
                                        RegionMappingIndex)
        surface = try_get_last_datatype(self.test_project.id, SurfaceIndex,
                                        filter)
        assert mapping is not None, "There should be one Mapping."
        assert surface is not None, "There should be one Cortical Surface."
        assert surface.gid == mapping.fk_surface_gid, "The surfaces should have the same GID"

        try:
            self.project_service.remove_datatype(self.test_project.id,
                                                 surface.gid)
            raise AssertionError(
                "The surface should still be used by a RegionMapping " +
                str(surface.gid))
        except RemoveDataTypeException:
            # OK, do nothing
            pass

        res = dao.get_datatype_by_gid(surface.gid)
        assert surface.id == res.id, "A used surface was deleted"
Example no. 17
    def update_metadata(self, submit_data):
        """
        Update DataType/ DataTypeGroup metadata
        THROW StructureException when input data is invalid.
        """
        new_data = dict()
        for key in DataTypeOverlayDetails().meta_attributes_list:
            if key in submit_data:
                new_data[key] = submit_data[key]

        if new_data[CommonDetails.CODE_OPERATION_TAG] == '':
            new_data[CommonDetails.CODE_OPERATION_TAG] = None
        try:
            if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID] != ''):
                # We need to edit a group
                all_data_in_group = dao.get_datatype_in_group(operation_group_id=
                                                              new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
                if len(all_data_in_group) < 1:
                    raise StructureException("Inconsistent group, can not be updated!")
                datatype_group = dao.get_generic_entity(DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
                all_data_in_group.append(datatype_group)
                for datatype in all_data_in_group:
                    new_data[CommonDetails.CODE_GID] = datatype.gid
                    self._edit_data(datatype, new_data, True)
            else:
                # Get the required DataType and operation from DB to store changes that will be done in XML.
                gid = new_data[CommonDetails.CODE_GID]
                datatype = dao.get_datatype_by_gid(gid)
                self._edit_data(datatype, new_data)
        except Exception as excep:
            self.logger.exception(excep)
            raise StructureException(str(excep))
Example no. 18
    def create_timeseries(self, connectivity, ts_type=None, sensors=None):
        """
        Create a stored TimeSeries entity.
        """
        operation, _, storage_path = self.__create_operation()

        if ts_type == "EEG":
            time_series = TimeSeriesEEG(storage_path=storage_path,
                                        sensors=sensors)
        else:
            rm = dao.get_generic_entity(RegionMapping, connectivity.gid,
                                        '_connectivity')
            if len(rm) < 1:
                rm = None
            else:
                rm = rm[0]
            time_series = TimeSeriesRegion(storage_path=storage_path,
                                           connectivity=connectivity,
                                           region_mapping=rm)

        data = numpy.random.random((10, 10, 10, 10))
        time_series.write_data_slice(data)
        time_series.write_time_slice(numpy.arange(10))
        adapter_instance = StoreAdapter([time_series])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        time_series = dao.get_datatype_by_gid(time_series.gid)
        return time_series
Example no. 19
    def configure(self, simulator_gid):
        """
        Make preparations for the adapter launch.
        """
        self.log.debug("%s: Instantiating requested simulator..." % str(self))

        simulator_service = SimulatorService()
        self.algorithm, connectivity_gid, simulation_state_gid = simulator_service.deserialize_simulator(
            simulator_gid, self.storage_path)
        self.branch_simulation_state_gid = simulation_state_gid

        # for monitor in self.algorithm.monitors:
        #     if issubclass(monitor, Projection):
        #         # TODO: add a service that loads a RM with Surface and Connectivity
        #         pass

        connectivity_index = dao.get_datatype_by_gid(connectivity_gid.hex)
        connectivity = h5.load_from_index(connectivity_index)

        connectivity.gid = connectivity_gid
        self.algorithm.connectivity = connectivity
        self.simulation_length = self.algorithm.simulation_length
        self.log.debug("%s: Initializing storage..." % str(self))
        try:
            self.algorithm.preconfigure()
        except ValueError as err:
            raise LaunchException(
                "Failed to configure simulator due to invalid Input Values. It could be because "
                "of an incompatibility between different version of TVB code.",
                err)
Example no. 20
    def index(self):
        des = SerializationManager(common.get_from_session(common.KEY_SIMULATOR_CONFIG))
        connectivity = des.conf.connectivity
        conn_idx = dao.get_datatype_by_gid(connectivity.hex)
        model = des.conf.model
        integrator = des.conf.integrator

        state_vars = model.state_variables
        noise_values = self.init_noise_config_values(model, integrator, conn_idx)
        initial_noise = self.group_noise_array_by_state_var(noise_values, state_vars, conn_idx.number_of_regions)

        current_project = common.get_current_project()
        file_handler = FilesHelper()
        conn_path = file_handler.get_project_folder(current_project, str(conn_idx.fk_from_operation))

        params = ConnectivityViewer.get_connectivity_parameters(conn_idx, conn_path)
        params.update({
            'title': 'Noise configuration',
            'mainContent': 'burst/noise',
            'isSingleMode': True,
            'submit_parameters_url': '/burst/noise/submit',
            'stateVars': state_vars,
            'stateVarsJson' : json.dumps(state_vars),
            'noiseInputValues' : initial_noise[0],
            'initialNoiseValues': json.dumps(initial_noise)
        })
        return self.fill_default_attributes(params, 'regionmodel')
Example no. 21
    def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
        """
        Find all DataTypes (including the linked ones and the groups) relevant for the current project.
        In case of a problem, will return an empty list.
        """
        metadata_list = []
        dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)

        for dt in dt_list:
            # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
            data = {}
            is_group = False
            group_op = None
            dt_entity = dao.get_datatype_by_gid(dt.gid)
            if dt_entity is None:
                self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
                continue
            #  Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken
            if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
                is_group = True
                group_op = dt.parent_operation.operation_group

            # All these fields are necessary here for dynamic Tree levels.
            data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
            data[DataTypeMetaData.KEY_GID] = dt.gid
            data[DataTypeMetaData.KEY_NODE_TYPE] = dt.display_type
            data[DataTypeMetaData.KEY_STATE] = dt.state
            data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
            data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
            data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
            data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id

            data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
            data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
            data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
            data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
            data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

            # Operation related fields:
            operation_name = CommonDetails.compute_operation_name(
                dt.parent_operation.algorithm.algorithm_category.displayname,
                dt.parent_operation.algorithm.displayname)
            data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
            data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
            data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
            data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
            data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

            completion_date = dt.parent_operation.completion_date
            string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
            string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
            data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
            data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
            data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month

            data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

            metadata_list.append(DataTypeMetaData(data, dt.invalid))

        return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)
Example no. 22
    def _compute_measure_params(self, region_mapping_volume, measure,
                                data_slice):
        # prepare the url that will project the measure onto the region volume map
        measure_h5_class, measure_h5_path = self._load_h5_of_gid(measure.gid)
        measure_h5 = measure_h5_class(measure_h5_path)
        min_value, max_value = measure_h5.get_min_max_values()
        measure_shape = measure_h5.array_data.shape
        if not data_slice:
            conn_index = dao.get_datatype_by_gid(
                region_mapping_volume.connectivity.load().hex)
            data_slice = self.get_default_slice(measure_shape,
                                                conn_index.number_of_regions)
            data_slice = slice_str(data_slice)
        url_volume_data = URLGenerator.build_url(
            self.stored_adapter.id,
            'get_mapped_array_volume_view',
            region_mapping_volume.gid.load(),
            parameter='')
        url_volume_data += 'mapped_array_gid=' + measure.gid + ';mapped_array_slice=' + data_slice + ';'

        return dict(minValue=min_value,
                    maxValue=max_value,
                    urlVolumeData=url_volume_data,
                    measureShape=slice_str(measure_shape),
                    measureSlice=data_slice)
Example no. 24
    def _store_imported_datatypes_in_db(self, project, all_datatypes, dt_burst_mappings, burst_ids_mapping):
        def by_time(dt):
            return dt.create_date or datetime.now()

        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}

        all_datatypes.sort(key=by_time)

        for datatype in all_datatypes:
            old_burst_id = dt_burst_mappings.get(datatype.gid)

            if old_burst_id is not None:
                datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

            datatype_allready_in_tvb = dao.get_datatype_by_gid(datatype.gid)

            if not datatype_allready_in_tvb:
                # Compute disk size. Similar to ABCAdapter._capture_operation_results.
                # No need to close the h5 as we have not written to it.
                associated_file = os.path.join(datatype.storage_path, datatype.get_storage_file_name())
                datatype.disk_size = FilesHelper.compute_size_on_disk(associated_file)

                self.store_datatype(datatype)
            else:
                FlowService.create_link([datatype_allready_in_tvb.id], project.id)
Example no. 25
    def __upgrade_datatype_list(self, datatypes):
        """
        Upgrade a list of DataTypes to the current version.
        
        :param datatypes: The list of DataTypes that should be upgraded.

        :returns: (nr_of_dts_upgraded_fine, nr_of_dts_upgraded_fault) a two-tuple of integers representing
            the number of DataTypes for which the upgrade worked fine, and the number of DataTypes for which
            some kind of fault occurred
        """
        nr_of_dts_upgraded_fine = 0
        nr_of_dts_upgraded_fault = 0
        for datatype in datatypes:
            specific_datatype = dao.get_datatype_by_gid(datatype.gid,
                                                        load_lazy=False)
            if isinstance(specific_datatype, MappedType):
                try:
                    self.upgrade_file(
                        specific_datatype.get_storage_file_path(),
                        specific_datatype)
                    nr_of_dts_upgraded_fine += 1
                except (MissingDataFileException,
                        FileVersioningException) as ex:
                    # The file is missing for some reason. Just mark the DataType as invalid.
                    datatype.invalid = True
                    dao.store_entity(datatype)
                    nr_of_dts_upgraded_fault += 1
                    self.log.exception(ex)
        return nr_of_dts_upgraded_fine, nr_of_dts_upgraded_fault
Example no. 26
    def _store_imported_datatypes_in_db(self, project, all_datatypes,
                                        dt_burst_mappings, burst_ids_mapping):
        def by_time(dt):
            return dt.create_date or datetime.now()

        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}

        all_datatypes.sort(key=by_time)

        for datatype in all_datatypes:
            old_burst_id = dt_burst_mappings.get(datatype.gid)

            if old_burst_id is not None:
                datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

            datatype_allready_in_tvb = dao.get_datatype_by_gid(datatype.gid)

            if not datatype_allready_in_tvb:
                # Compute disk size. Similar to ABCAdapter._capture_operation_results.
                # No need to close the h5 as we have not written to it.
                associated_file = os.path.join(
                    datatype.storage_path, datatype.get_storage_file_name())
                datatype.disk_size = FilesHelper.compute_size_on_disk(
                    associated_file)

                self.store_datatype(datatype)
            else:
                FlowService.create_link([datatype_allready_in_tvb.id],
                                        project.id)
Example no. 27
    def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
        """
        Find all DataTypes (including the linked ones and the groups) relevant for the current project.
        In case of a problem, will return an empty list.
        """
        metadata_list = []
        dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)

        for dt in dt_list:
            # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
            data = {}
            is_group = False
            group_op = None
            dt_entity = dao.get_datatype_by_gid(dt.gid)
            if dt_entity is None:
                self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
                continue
            ## Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken
            if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
                is_group = True
                group_op = dt.parent_operation.operation_group

            # All these fields are necessary here for dynamic Tree levels.
            data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
            data[DataTypeMetaData.KEY_GID] = dt.gid
            data[DataTypeMetaData.KEY_NODE_TYPE] = dt.type
            data[DataTypeMetaData.KEY_STATE] = dt.state
            data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
            data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
            data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
            data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id

            data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
            data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
            data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
            data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
            data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

            # Operation related fields:
            operation_name = CommonDetails.compute_operation_name(
                dt.parent_operation.algorithm.algorithm_category.displayname,
                dt.parent_operation.algorithm.displayname)
            data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
            data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
            data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
            data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
            data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

            completion_date = dt.parent_operation.completion_date
            string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
            string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
            data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
            data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
            data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month

            data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

            metadata_list.append(DataTypeMetaData(data, dt.invalid))

        return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)
    def index(self):
        current_user_id = common.get_logged_user().id
        # In case the number of dynamics gets big we should add a filter in the ui.
        dynamics = dao.get_dynamics_for_user(current_user_id)

        if not dynamics:
            return self.no_dynamics_page()

        sim_config = common.get_from_session(common.KEY_SIMULATOR_CONFIG)
        connectivity = sim_config.connectivity

        if connectivity is None:
            msg = 'You have to select a connectivity before setting up the region Model. '
            common.set_error_message(msg)
            raise ValueError(msg)

        current_project = common.get_current_project()
        file_handler = FilesHelper()
        conn_idx = dao.get_datatype_by_gid(connectivity.hex)
        conn_path = file_handler.get_project_folder(current_project, str(conn_idx.fk_from_operation))

        params = ConnectivityViewer.get_connectivity_parameters(conn_idx, conn_path)
        burst_config = common.get_from_session(common.KEY_BURST_CONFIG)

        params.update({
            'title': 'Model parameters',
            'mainContent': 'burst/model_param_region',
            'isSingleMode': True,
            'submit_parameters_url': '/burst/modelparameters/regions/submit_model_parameters',
            'dynamics': dynamics,
            'dynamics_json': self._dynamics_json(dynamics),
            'initial_dynamic_ids': burst_config.dynamic_ids
        })

        return self.fill_default_attributes(params, 'regionmodel')
Example no. 29
    def prepare_range_labels(operation_group, range_json):
        """
        Prepare Range labels for display in UI.
        When the current range_json is empty, returns None, [RANGE_MISSING_STRING], [RANGE_MISSING_STRING], True

        :param operation_group: model.OperationGroup instance
        :param range_json: Stored JSON for a given range
        :return: String with the current range label, array of ranged values, array of labels for the current range,
            and a boolean telling whether the range contains numbers
        """
        contains_numbers, range_name, range_values = operation_group.load_range_numbers(
            range_json)

        if contains_numbers is None:
            return None, range_values, range_values, True

        if contains_numbers:
            range_labels = range_values
        else:
            # when datatypes are in range, get the display name for those and use as labels.
            range_labels = []
            for data_gid in range_values:
                range_labels.append(
                    dao.get_datatype_by_gid(data_gid).display_name)

        return range_name, range_values, range_labels, contains_numbers
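A hedged call sketch; the `operation_group` instance and its stored `range_json` string are assumed to already exist:

# range_json holds the JSON stored for one of the group's ranges (assumption for illustration)
range_name, range_values, range_labels, has_numbers = prepare_range_labels(operation_group, range_json)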
Example no. 30
        def build():
            """
            Project src will have 3 datatypes, and a link to the VW from the dest project.
            Project dest will have the derived VW and links
            """
            # add a connectivity to src project and link it to dest project
            zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                    'connectivity', 'connectivity_96.zip')
            conn = TestFactory.import_zip_connectivity(self.dst_user,
                                                       self.src_project,
                                                       zip_path, "John")
            self.flow_service.create_link([conn.id], self.dest_project.id)

            # in dest derive a ValueWrapper from the linked conn
            vw_gid = TestFactory.create_value_wrapper(self.dst_user,
                                                      self.dest_project)[1]
            vw = dao.get_datatype_by_gid(vw_gid)
            # then link the value wrapper in the src project
            self.flow_service.create_link([vw.id], self.src_project.id)

            assert 3 == len(dao.get_datatypes_in_project(self.src_project.id))
            assert 1 == len(
                dao.get_linked_datatypes_in_project(self.src_project.id))
            assert 1 == len(dao.get_datatypes_in_project(self.dest_project.id))
            assert 3 == len(
                dao.get_linked_datatypes_in_project(self.dest_project.id))
Example no. 31
    def update_metadata(self, submit_data):
        """
        Update DataType/ DataTypeGroup metadata
        THROW StructureException when input data is invalid.
        """
        new_data = dict()
        for key in DataTypeOverlayDetails().meta_attributes_list:
            if key in submit_data:
                new_data[key] = submit_data[key]

        if new_data[CommonDetails.CODE_OPERATION_TAG] == '':
            new_data[CommonDetails.CODE_OPERATION_TAG] = None
        try:
            if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]
                    and new_data[CommonDetails.CODE_OPERATION_GROUP_ID] != ''):
                # We need to edit a group
                all_data_in_group = dao.get_datatype_in_group(operation_group_id=
                                                              new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
                if len(all_data_in_group) < 1:
                    raise StructureException("Inconsistent group, can not be updated!")
                datatype_group = dao.get_generic_entity(model.DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
                all_data_in_group.append(datatype_group)
                for datatype in all_data_in_group:
                    new_data[CommonDetails.CODE_GID] = datatype.gid
                    self._edit_data(datatype, new_data, True)
            else:
                # Get the required DataType and operation from DB to store changes that will be done in XML.
                gid = new_data[CommonDetails.CODE_GID]
                datatype = dao.get_datatype_by_gid(gid)
                self._edit_data(datatype, new_data)
        except Exception as excep:
            self.logger.exception(excep)
            raise StructureException(str(excep))
Example no. 33
    def set_datatype_visibility(datatype_gid, is_visible):
        """
        Sets the dataType visibility. If the given dataType is a dataType group or it is part of a
        dataType group, then this method will set the visibility for each dataType from this group.
        """

        def set_visibility(dt):
            """ set visibility flag, persist in db and h5"""
            dt.visible = is_visible
            dt = dao.store_entity(dt)

            h5_path = h5.path_for_stored_index(dt)
            with H5File.from_file(h5_path) as f:
                f.visible.store(is_visible)

        def set_group_descendants_visibility(datatype_group_id):
            datatypes_in_group = dao.get_datatypes_from_datatype_group(datatype_group_id)
            for group_dt in datatypes_in_group:
                set_visibility(group_dt)

        datatype = dao.get_datatype_by_gid(datatype_gid)

        if isinstance(datatype, DataTypeGroup):  # datatype is a group
            set_group_descendants_visibility(datatype.id)
            datatype.visible = is_visible
            dao.store_entity(datatype)
        elif datatype.fk_datatype_group is not None:  # datatype is member of a group
            set_group_descendants_visibility(datatype.fk_datatype_group)
            # the datatype to be updated is the parent datatype group
            parent = dao.get_datatype_by_id(datatype.fk_datatype_group)
            parent.visible = is_visible
            dao.store_entity(parent)
        else:
            # update the single datatype.
            set_visibility(datatype)
Example no. 34
 def test_full_import(self):
     """
     Test that importing a CFF generates at least one DataType in DB.
     """
     all_dt = self.get_all_datatypes()
     self.assertEqual(0, len(all_dt))
     TestFactory.import_cff(cff_path=self.VALID_CFF,
                            test_user=self.test_user,
                            test_project=self.test_project)
     flow_service = FlowService()
     ### Check that one Connectivity was persisted
     gid_list = flow_service.get_available_datatypes(
         self.test_project.id, 'tvb.datatypes.connectivity.Connectivity')
     self.assertEquals(len(gid_list), 1)
     ### Check that one RegionMapping was persisted
     gid_list = flow_service.get_available_datatypes(
         self.test_project.id, 'tvb.datatypes.surfaces.RegionMapping')
     self.assertEquals(len(gid_list), 1)
     ### Check that one LocalConnectivity was persisted
     gids = flow_service.get_available_datatypes(
         self.test_project.id, 'tvb.datatypes.surfaces.LocalConnectivity')
     self.assertEquals(len(gids), 1)
     connectivity = dao.get_datatype_by_gid(gids[0][2])
     metadata = connectivity.get_metadata()
     self.assertEqual(metadata['Cutoff'], '40.0')
     self.assertEqual(metadata['Equation'], 'null')
     self.assertFalse(metadata['Invalid'])
     self.assertFalse(metadata['Is_nan'])
     self.assertEqual(metadata['Type'], 'LocalConnectivity')
     ### Check that 2 Surfaces were persisted
     gid_list = flow_service.get_available_datatypes(
         self.test_project.id, 'tvb.datatypes.surfaces_data.SurfaceData')
     self.assertEquals(len(gid_list), 2)
Example no. 35
    def _store_imported_datatypes_in_db(self, project, all_datatypes,
                                        dt_burst_mappings, burst_ids_mapping):
        def by_time(dt):
            return dt.create_date or datetime.now()

        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}

        all_datatypes.sort(key=by_time)

        for datatype in all_datatypes:
            old_burst_id = dt_burst_mappings.get(datatype.gid)

            if old_burst_id is not None:
                datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

            datatype_allready_in_tvb = dao.get_datatype_by_gid(datatype.gid)

            if not datatype_allready_in_tvb:
                self.store_datatype(datatype)
            else:
                AlgorithmService.create_link([datatype_allready_in_tvb.id],
                                             project.id)
Example no. 36
    def __upgrade_datatype_list(self, datatypes):
        """
        Upgrade a list of DataTypes to the current version.
        
        :param datatypes: The list of DataTypes that should be upgraded.

        :returns: (nr_of_dts_upgraded_fine, nr_of_dts_upgraded_fault) a two-tuple of integers representing
            the number of DataTypes for which the upgrade worked fine, and the number of DataTypes for which
            some kind of fault occurred
        """
        nr_of_dts_upgraded_fine = 0
        nr_of_dts_upgraded_fault = 0
        for datatype in datatypes:
            specific_datatype = dao.get_datatype_by_gid(datatype.gid)
            if isinstance(specific_datatype, MappedType):
                try:
                    self.upgrade_file(specific_datatype.get_storage_file_path())
                    nr_of_dts_upgraded_fine += 1
                except (MissingDataFileException, FileVersioningException) as ex:
                    # The file is missing for some reason. Just mark the DataType as invalid.
                    datatype.invalid = True
                    dao.store_entity(datatype)
                    nr_of_dts_upgraded_fault += 1
                    self.log.exception(ex)
        return nr_of_dts_upgraded_fine, nr_of_dts_upgraded_fault
Example no. 37
    def _remove_project_node_files(self,
                                   project_id,
                                   gid,
                                   skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem will THROW StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
            if links:
                was_link = False
                for link in links:
                    # This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    new_op = Operation(
                        dao.get_system_user().id, links[0].fk_to_project,
                        datatype.parent_operation.fk_from_algo,
                        datatype.parent_operation.parameters,
                        datatype.parent_operation.meta_data,
                        datatype.parent_operation.status,
                        datatype.parent_operation.start_date,
                        datatype.parent_operation.completion_date,
                        datatype.parent_operation.fk_operation_group,
                        datatype.parent_operation.additional_info,
                        datatype.parent_operation.user_group,
                        datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    new_op_loaded = dao.get_operation_by_id(new_op.id)
                    self.structure_helper.write_operation_metadata(
                        new_op_loaded)
                    full_path = h5.path_for_stored_index(datatype)
                    self.structure_helper.move_datatype(
                        datatype, to_project, str(new_op.id), full_path)
                    datatype.fk_from_operation = new_op.id
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                h5_path = h5.path_for_stored_index(datatype)
                self.structure_helper.remove_datatype_file(h5_path)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException(
                "Remove operation failed for unknown reasons.Please contact system administrator."
            )
Example no. 38
    def test_import_export(self, user_factory, project_factory,
                           value_wrapper_factory):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        test_user = user_factory()
        test_project = project_factory(test_user, "TestImportExport",
                                       "test_desc")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(test_user, test_project, zip_path)
        value_wrapper = value_wrapper_factory(test_user, test_project)

        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(test_project)
        assert self.zip_path is not None, "Exported file is none"

        # Remove the original project
        self.project_service.remove_project(test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(
            test_user.id)
        assert 0 == len(result), "Project Not removed!"
        assert 0 == lng_, "Project Not removed!"

        # Now try to import again project
        self.import_service.import_project_structure(self.zip_path,
                                                     test_user.id)
        result = self.project_service.retrieve_projects_for_user(
            test_user.id)[0]
        assert len(result) == 1, "There should be only one project."
        assert result[0].name == "TestImportExport", "The project name is not correct."
        assert result[0].description == "test_desc", "The project description is not correct."
        test_project = result[0]

        count_operations = dao.get_filtered_operations(test_project.id,
                                                       None,
                                                       is_count=True)

        # 1 op. - import conn; 2 op. - BCT Analyzer
        assert 2 == count_operations, "Invalid ops number after export and import !"
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            assert datatype.module == expected_results[gid][0], 'DataTypes not imported correctly'
            assert datatype.type == expected_results[gid][1], 'DataTypes not imported correctly'
        # check the value wrapper
        new_val = try_get_last_datatype(test_project.id, ValueWrapperIndex)
        assert value_wrapper.data_value == new_val.data_value, "Data value incorrect"
        assert value_wrapper.data_type == new_val.data_type, "Data type incorrect"
        assert value_wrapper.data_name == new_val.data_name, "Data name incorrect"
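Outside the test harness, the same round trip reduces to two service calls. A hedged sketch using the classes exercised above; the import paths follow the usual tvb-framework layout and the project/user values are placeholders from an existing session.

from tvb.adapters.exporters.export_manager import ExportManager
from tvb.core.services.import_service import ImportService


def export_then_reimport(project, user_id):
    """Hedged sketch: export a project to a ZIP archive and re-import it for the same user."""
    zip_path = ExportManager().export_project(project)
    ImportService().import_project_structure(zip_path, user_id)
    return zip_path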
Exemplo n.º 39
0
    def build_structure_for_datatype(datatype_gid):

        datatype = dao.get_datatype_by_gid(datatype_gid)
        is_group = dao.is_datatype_group(datatype_gid)

        structure = NodeStructure(datatype_gid, datatype.type)
        structure.data = NodeData.build_node_for_datatype(datatype.id, datatype.display_name, is_group=is_group)
        return structure
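A short usage sketch for the helper above; the GID is a placeholder and the call assumes a populated TVB database.

node = build_structure_for_datatype("00000000-aaaa-bbbb-cccc-000000000000")  # hypothetical GID
# node is a NodeStructure whose .data was built from the datatype's id and display_name,
# with is_group reflecting whether the GID belongs to a DataTypeGroup.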
Exemplo n.º 40
0
    def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
        """
        Method used for removing a dataType. If the given dataType is a DatatypeGroup
        or a dataType from a DataTypeGroup, then this method will remove the entire group.
        The operation(s) used for creating the dataType(s) will also be removed.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype is None:
            self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
            return
        user = dao.get_user_for_datatype(datatype.id)
        freed_space = datatype.disk_size or 0
        is_datatype_group = False
        if dao.is_datatype_group(datatype_gid):
            is_datatype_group = True
            freed_space = dao.get_datatype_group_disk_size(datatype.id)
        elif datatype.fk_datatype_group is not None:
            is_datatype_group = True
            datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)
            freed_space = dao.get_datatype_group_disk_size(datatype.id)

        operations_set = [datatype.fk_from_operation]

        correct = True

        if is_datatype_group:
            self.logger.debug("Removing datatype group %s" % datatype)
            data_list = dao.get_datatypes_from_datatype_group(datatype.id)
            for adata in data_list:
                self._remove_project_node_files(project_id, adata.gid, skip_validation)
                if adata.fk_from_operation not in operations_set:
                    operations_set.append(adata.fk_from_operation)

            datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
            dao.remove_datatype(datatype_gid)
            correct = correct and dao.remove_entity(model.OperationGroup, datatype_group.fk_operation_group)
        else:
            self.logger.debug("Removing datatype %s" % datatype)
            self._remove_project_node_files(project_id, datatype.gid, skip_validation)

        ## Remove Operation entities in case no other DataType needs them.
        project = dao.get_project_by_id(project_id)
        for operation_id in operations_set:
            dependent_dt = dao.get_generic_entity(model.DataType, operation_id, "fk_from_operation")
            if len(dependent_dt) > 0:
                ### Do not remove the Operation while other DataTypes still refer to it.
                continue
            correct = correct and dao.remove_entity(model.Operation, operation_id)
            ## Make sure the folder of the Operation just removed is deleted as well
            self.structure_helper.remove_operation_data(project.name, operation_id)

        if not correct:
            raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))

        user.used_disk_space = user.used_disk_space - freed_space
        dao.store_entity(user)
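As the docstring notes, removing any member of a DataTypeGroup clears the whole group together with the operations that produced it. A hedged sketch of exercising that behaviour through the service; import paths and identifiers are assumptions for illustration.

from tvb.core.entities.storage import dao
from tvb.core.services.project_service import ProjectService


def remove_group_member(project_id, member_gid):
    """Hedged sketch: removing one group member is expected to remove the entire group."""
    member = dao.get_datatype_by_gid(member_gid)
    was_in_group = member.fk_datatype_group is not None
    ProjectService().remove_datatype(project_id, member_gid)
    # After the call the member (and, when it belonged to a group, its siblings) should be gone.
    return was_in_group and dao.get_datatype_by_gid(member_gid) is None
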
    def test_import_export(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)
        
        #create an array mapped in DB
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
        inserted = self.flow_service.get_available_datatypes(self.test_project.id,
                                                             "tvb.datatypes.arrays.MappedArray")[1]
        self.assertEqual(1, inserted, "Problems when inserting data")
        
        #create a value wrapper
        value_wrapper = self._create_value_wrapper()
        count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
        self.assertEqual(2, count_operations, "Invalid ops number before export!")

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(self.test_project)
        self.assertTrue(self.zip_path is not None, "Exported file is none")
        
        # Remove the original project
        self.project_service.remove_project(self.test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(self.test_user.id)
        self.assertEqual(0, len(result), "Project Not removed!")
        self.assertEqual(0, lng_, "Project Not removed!")
        
        # Now try to import again project
        self.import_service.import_project_structure(self.zip_path, self.test_user.id)
        result = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(result), 1, "There should be only one project.")
        self.assertEqual(result[0].name, "GeneratedProject", "The project name is not correct.")
        self.assertEqual(result[0].description, "test_desc", "The project description is not correct.")
        self.test_project = result[0]
        
        count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
        
        #1 op. - import cff; 2 op. - save the array wrapper;
        self.assertEqual(2, count_operations, "Invalid ops number after export and import !")
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            self.assertEqual(datatype.module, expected_results[gid][0], 'DataTypes not imported correctly')
            self.assertEqual(datatype.type, expected_results[gid][1], 'DataTypes not imported correctly')
        #check the value wrapper
        new_val = self.flow_service.get_available_datatypes(self.test_project.id, 
                                                            "tvb.datatypes.mapped_values.ValueWrapper")[0]
        self.assertEqual(1, len(new_val), "One !=" + str(len(new_val)))
        new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
        self.assertEqual(value_wrapper.data_value, new_val.data_value, "Data value incorrect")
        self.assertEqual(value_wrapper.data_type, new_val.data_type, "Data type incorrect")
        self.assertEqual(value_wrapper.data_name, new_val.data_name, "Data name incorrect")
Exemplo n.º 42
0
 def test_remove_value_wrapper(self):
     """
     Test the deletion of a value wrapper dataType
     """
     count_vals = self.count_all_entities(ValueWrapper)
     self.assertEqual(0, count_vals, "There should be no value wrapper")
     value_wrapper = self._create_value_wrapper()
     self.project_service.remove_datatype(self.test_project.id, value_wrapper.gid)
     res = dao.get_datatype_by_gid(value_wrapper.gid)
     self.assertEqual(None, res, "The value wrapper was not deleted.")
Exemplo n.º 43
0
    def test_remove_used_surface(self):
        """
        Tries to remove an used surface
        """
        mapping, mapping_count = self.flow_service.get_available_datatypes(self.test_project.id, RegionMapping)
        self.assertEquals(1, mapping_count, "There should be one Mapping.")
        mapping_gid = mapping[0][2]
        mapping = ABCAdapter.load_entity_by_gid(mapping_gid)
        surface = dao.get_datatype_by_gid(mapping.surface.gid)
        self.assertEqual(surface.gid, mapping.surface.gid, "The surfaces should have the same GID")
        try:
            self.project_service.remove_datatype(self.test_project.id, surface.gid)
            self.fail("The surface should still be used by a RegionMapping " + str(surface.gid))
        except RemoveDataTypeException:
            #OK, do nothing
            pass

        res = dao.get_datatype_by_gid(surface.gid)
        self.assertEqual(surface.id, res.id, "A used surface was deleted")
    def set_datatype_visibility(datatype_gid, is_visible):
        """
        Sets the dataType visibility. If the given dataType is a dataType group or it is part of a
        dataType group, then this method will set the visibility for each dataType in that group.
        """
        datatype = dao.get_datatype_by_gid(datatype_gid)
        if datatype.fk_datatype_group is not None:
            datatype_gid = dao.get_datatype_by_id(datatype.fk_datatype_group).gid

        dao.set_datatype_visibility(datatype_gid, is_visible)
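A brief hedged usage sketch: hiding a datatype that belongs to a group propagates to the whole group, per the docstring above. The GID is a placeholder, and the visible flag is assumed to be the column toggled by dao.set_datatype_visibility.

from tvb.core.entities.storage import dao

set_datatype_visibility("some-datatype-gid", False)
refreshed = dao.get_datatype_by_gid("some-datatype-gid")
print(refreshed.visible)  # expected False; group siblings, if any, share the new visibility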
Exemplo n.º 45
0
def update(input_file):
    """
    :param input_file: the file that needs to be converted to a newer file storage version.
    """

    if not os.path.isfile(input_file):
        raise IncompatibleFileManagerException("The input path %s received for upgrading from 3 -> 4 is not a "
                                               "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    if DataTypeMetaData.KEY_CLASS_NAME not in root_metadata:
        raise IncompatibleFileManagerException("File %s received for upgrading 3 -> 4 is not valid, due to missing "
                                               "metadata: %s" % (input_file, DataTypeMetaData.KEY_CLASS_NAME))
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]

    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        LOGGER.info("Updating ProjectionSurface %s from %s" % (file_name, folder))

        projection_type = projections.EEG_POLYMORPHIC_IDENTITY
        if "SEEG" in class_name:
            projection_type = projections.SEEG_POLYMORPHIC_IDENTITY
        elif "MEG" in class_name:
            projection_type = projections.MEG_POLYMORPHIC_IDENTITY

        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" % (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))

        service = ImportService()
        operation_id = int(os.path.split(folder)[1])
        dt = service.load_datatype_from_file(folder, file_name, operation_id, move=False)
        dt_db = dao.get_datatype_by_gid(dt.gid)

        if dt_db is not None:
            # DT already in DB (update of own storage, by making sure all fields are being correctly populated)
            dt_db.configure()
            dt_db.persist_full_metadata()
            try:
                # restore in DB, in case TVB 1.4 had wrongly imported flags
                dao.store_entity(dt_db)
            except Exception:
                LOGGER.exception("Could not update flags in DB, but we continue with the update!")

        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Have default values, to avoid the full project not being imported
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
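The routine above upgrades a single HDF5 file in place. A minimal sketch for walking an operation folder and upgrading every datatype file in it; the path is a placeholder and the loop assumes the update function defined above.

import os

op_folder = "/opt/TVB_STORAGE/PROJECTS/Default_Project/42"  # hypothetical operation folder
for file_name in os.listdir(op_folder):
    if file_name.endswith(".h5"):
        # Each datatype file is migrated in place from storage version 3 to 4.
        update(os.path.join(op_folder, file_name))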
Exemplo n.º 46
0
 def _update_datatype_disk_size(self, file_path):
     """
     Computes and updates the disk_size attribute of the DataType, for which was created the given file.
     """
     file_handler = FilesHelper()
     datatype_gid = self._get_manager(file_path).get_gid_attribute()
     datatype = dao.get_datatype_by_gid(datatype_gid)
     
     if datatype is not None:
         datatype.disk_size = file_handler.compute_size_on_disk(file_path)
         dao.store_entity(datatype)
Exemplo n.º 47
0
 def _remove_entity(self, data_class, before_number):
     """
     Try to remove entity. Fail otherwise.
     """
     dts, count = self.flow_service.get_available_datatypes(self.test_project.id, data_class)
     self.assertEquals(count, before_number)
     for dt in dts:
         data_gid = dt[2]
         self.project_service.remove_datatype(self.test_project.id, data_gid)
         res = dao.get_datatype_by_gid(data_gid)
         self.assertEqual(None, res, "The entity was not deleted")
Exemplo n.º 48
0
 def test_remove_time_series(self):
     """
     Tests the happy flow for the deletion of a time series.
     """
     count_ts = self.count_all_entities(TimeSeries)
     self.assertEqual(0, count_ts, "There should be no time series")
     self._create_timeseries()
     series = self.get_all_entities(TimeSeries)
     self.assertEqual(1, len(series), "There should be only one time series")
     self.project_service.remove_datatype(self.test_project.id, series[0].gid)
     res = dao.get_datatype_by_gid(series[0].gid)
     self.assertEqual(None, res, "The time series was not deleted.")
    def __init__(self, datatype_gid):
        NodeStructure.__init__(self, datatype_gid, "")

        datatype_shape = DATATYPE_SHAPE
        if dao.is_datatype_group(datatype_gid):
            datatype_shape = DATATYPE_GROUP_SHAPE
        datatype = dao.get_datatype_by_gid(datatype_gid)

        node_data = NodeData(MAX_SHAPE_SIZE, DATATYPE_SHAPE_COLOR, datatype_shape,
                             NODE_DATATYPE_TYPE, datatype.id, datatype.display_name)

        self.name = str(datatype.type)
        self.data = node_data
Exemplo n.º 50
0
 def _get_linked_datatypes_storage_path(project):
     """
     :return: the file paths to the datatypes that are linked in `project`
     """
     paths = []
     for lnk_dt in dao.get_linked_datatypes_in_project(project.id):
         # get datatype as a mapped type
         lnk_dt = dao.get_datatype_by_gid(lnk_dt.gid)
         if lnk_dt.storage_path is not None:
             paths.append(lnk_dt.get_storage_file_path())
         else:
             LOG.warning("Problem when trying to retrieve path on %s:%s for export!" % (lnk_dt.type, lnk_dt.gid))
     return paths
Exemplo n.º 51
0
    def _remove_project_node_files(self, project_id, gid, skip_validation=False):
        """
        Delegate removal of a node in the structure of the project.
        In case of a problem will THROW StructureException.
        """
        try:
            project = self.find_project(project_id)
            datatype = dao.get_datatype_by_gid(gid)
            links = dao.get_links_for_datatype(datatype.id)
            if links:
                was_link = False
                for link in links:
                    #This means it's only a link and we need to remove it
                    if link.fk_from_datatype == datatype.id and link.fk_to_project == project.id:
                        dao.remove_entity(model.Links, link.id)
                        was_link = True
                if not was_link:
                    # Create a clone of the operation
                    new_op = model.Operation(dao.get_system_user().id,
                                             links[0].fk_to_project,
                                             datatype.parent_operation.fk_from_algo,
                                             datatype.parent_operation.parameters,
                                             datatype.parent_operation.meta_data,
                                             datatype.parent_operation.method_name,
                                             datatype.parent_operation.status,
                                             datatype.parent_operation.start_date,
                                             datatype.parent_operation.completion_date,
                                             datatype.parent_operation.fk_operation_group,
                                             datatype.parent_operation.additional_info,
                                             datatype.parent_operation.user_group,
                                             datatype.parent_operation.range_values)
                    new_op = dao.store_entity(new_op)
                    to_project = self.find_project(links[0].fk_to_project).name
                    new_op_loaded = dao.get_operation_by_id(new_op.id)
                    self.structure_helper.write_operation_metadata(new_op_loaded)
                    self.structure_helper.move_datatype(datatype, to_project, str(new_op.id))
                    datatype.set_operation_id(new_op.id)
                    datatype.parent_operation = new_op
                    dao.store_entity(datatype)
                    dao.remove_entity(model.Links, links[0].id)
            else:
                specific_remover = get_remover(datatype.type)(datatype)
                specific_remover.remove_datatype(skip_validation)
                self.structure_helper.remove_datatype(datatype)

        except RemoveDataTypeException:
            self.logger.exception("Could not execute operation Node Remove!")
            raise
        except FileStructureException:
            self.logger.exception("Remove operation failed")
            raise StructureException("Remove operation failed for unknown reasons.Please contact system administrator.")
Exemplo n.º 52
0
def update():
    """
    Recompute and store edge lengths for all Surface entities, so that existing data matches the latest code.
    """

    try:
        all_surfaces = dao.get_generic_entity(model.DataType, "tvb.datatypes.surfaces", "module")
        for srf in all_surfaces:
            surface = dao.get_datatype_by_gid(srf.gid)
            if isinstance(surface, Surface):
                surface._find_edge_lengths()
                dao.store_entity(surface)
    except Exception:
        LOGGER.exception("Could update Surface entities!")
Exemplo n.º 53
0
def load_entity_by_gid(data_gid):
    """
    Load a generic DataType, specified by GID.
    """
    datatype = dao.get_datatype_by_gid(data_gid)
    if isinstance(datatype, MappedType):
        datatype_path = datatype.get_storage_file_path()
        files_update_manager = FilesUpdateManager()
        if not files_update_manager.is_file_up_to_date(datatype_path):
            datatype.invalid = True
            dao.store_entity(datatype)
            raise FileVersioningException("Encountered DataType with an incompatible storage or data version. "
                                          "The DataType was marked as invalid.")
    return datatype
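When the underlying file storage is out of date, the loader above marks the datatype invalid and raises, so callers may want to handle that explicitly. A hedged sketch, assuming FileVersioningException is importable from the same module the code above uses.

def safe_load_entity(data_gid):
    """Hedged sketch: return the entity, or None when its file storage is incompatible."""
    try:
        return load_entity_by_gid(data_gid)
    except FileVersioningException:
        # load_entity_by_gid has already flagged the datatype as invalid in the DB.
        return None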
Exemplo n.º 54
0
 def test_remove_array_wrapper(self):
     """
     Tests the happy flow for the deletion of an array wrapper.
     """
     count_array = self.count_all_entities(MappedArray)
     self.assertEqual(1, count_array)
     data = {'param_1': 'some value'}
     OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
     array_wrappers = self.get_all_entities(MappedArray)
     self.assertEqual(2, len(array_wrappers))
     array_gid = array_wrappers[0].gid
     self.project_service.remove_datatype(self.test_project.id, array_gid)
     res = dao.get_datatype_by_gid(array_gid)
     self.assertEqual(None, res, "The array wrapper was not deleted.")
def update():
    """
    Update Surface metadata
    """

    try:
        all_surfaces = dao.get_generic_entity(model.DataType, "tvb.datatypes.surfaces", "module")
        for srf in all_surfaces:
            surface = dao.get_datatype_by_gid(srf.gid)
            if isinstance(surface, Surface):
                surface._find_edge_lengths()
                dao.store_entity(surface)
                surface.persist_full_metadata()
    except Exception:
        LOGGER.exception("Could update Surface entities!")
Exemplo n.º 56
0
 def create_timeseries(self, connectivity, ts_type=None, sensors=None):
     """
     Create a stored TimeSeries entity.
     """
     operation, _, storage_path = self.__create_operation()
     if ts_type == "EEG":
         time_series = TimeSeriesEEG(storage_path=storage_path, sensors=sensors)
     else:
         time_series = TimeSeriesRegion(storage_path=storage_path, connectivity=connectivity)
     data = numpy.random.random((10, 10, 10, 10))
     time = numpy.arange(10)
     time_series.write_data_slice(data)
     time_series.write_time_slice(time)
     adapter_instance = StoreAdapter([time_series])
     OperationService().initiate_prelaunch(operation, adapter_instance, {})
     time_series = dao.get_datatype_by_gid(time_series.gid)
     return time_series
 def __datatype2metastructure(row, dt_ids):
     """
     Convert a list of data retrieved from DB and create a DataTypeMetaData object.
     """
     data = {}
     is_group = False
     group = None
     if row[7] is not None and row[7] and row[14] in dt_ids:
         is_group = True
         group = dao.get_generic_entity(model.OperationGroup, row[7])
         if group and len(group):
             group = group[0]
         else:
             is_group = False
     datatype_group = None
     if row[14] is not None and row[14] in dt_ids:
         datatype_group = dao.get_datatype_by_id(row[14])
     dt_entity = dao.get_datatype_by_gid(row[9])
     data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
     ## All these fields are necessary here for dynamic Tree levels.
     data[DataTypeMetaData.KEY_NODE_TYPE] = datatype_group.type if datatype_group is not None else row[0]
     data[DataTypeMetaData.KEY_STATE] = row[1]
     data[DataTypeMetaData.KEY_SUBJECT] = str(row[2])
     operation_name = CommonDetails.compute_operation_name(row[3], row[4], row[5])
     data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
     data[DataTypeMetaData.KEY_AUTHOR] = row[6]
     data[DataTypeMetaData.KEY_OPERATION_TAG] = group.name if is_group else row[8]
     data[DataTypeMetaData.KEY_OP_GROUP_ID] = group.id if is_group else None
     data[DataTypeMetaData.KEY_GID] = datatype_group.gid if datatype_group is not None else row[9]
     data[DataTypeMetaData.KEY_DATE] = date2string(row[10]) if (row[10] is not None) else ''
     data[DataTypeMetaData.KEY_DATATYPE_ID] = datatype_group.id if datatype_group is not None else row[11]
     data[DataTypeMetaData.KEY_LINK] = row[12]
     data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = row[5]
     date_string = row[10].strftime(MONTH_YEAR_FORMAT) if row[10] is not None else ""
     data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = date_string
     date_string = row[10].strftime(DAY_MONTH_YEAR_FORMAT) if row[10] is not None else ""
     data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = date_string
     data[DataTypeMetaData.KEY_BURST] = row[15] if row[15] is not None else '-None-'
     data[DataTypeMetaData.KEY_TAG_1] = row[16] if row[16] else ''
     data[DataTypeMetaData.KEY_TAG_2] = row[17] if row[17] else ''
     data[DataTypeMetaData.KEY_TAG_3] = row[18] if row[18] else ''
     data[DataTypeMetaData.KEY_TAG_4] = row[19] if row[19] else ''
     data[DataTypeMetaData.KEY_TAG_5] = row[20] if row[20] else ''
     data[DataTypeMetaData.KEY_RELEVANCY] = True if row[21] > 0 else False
     invalid = True if row[13] else False
     return DataTypeMetaData(data, invalid)
Exemplo n.º 58
0
 def test_handle_event(self):
     """
     Test a defined handler for the store project method.
     """
     path_to_events = os.path.dirname(__file__)
     event_handlers.read_events([path_to_events])
     data = dict(name="test_project", description="test_description", users=[])
     initial_projects = dao.get_projects_for_user(self.test_user.id)
     self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
     test_project = self.project_service.store_project(self.test_user, True, None, **data)
     # Operations will start asynchronously; Give them time.
     time.sleep(1)
     gid = dao.get_last_data_with_uid("test_uid")
     self.assertTrue(gid is not None, "Nothing was stored in database!")
     datatype = dao.get_datatype_by_gid(gid)
     self.assertEqual(datatype.type, "Datatype1", "Wrong data stored!")
     self.project_service._remove_project_node_files(test_project.id, gid)
Exemplo n.º 59
0
 def test_update_meta_data_simple(self):
     """
     Test the new update metaData for a simple data that is not part of a group.
     """
     inserted_project, gid, _ = self._create_value_wrapper(self.test_user)
     new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                      DataTypeOverlayDetails.DATA_STATE: "second_state",
                      DataTypeOverlayDetails.CODE_GID: gid,
                      DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'}
     self.project_service.update_metadata(new_meta_data)
     
     new_datatype = dao.get_datatype_by_gid(gid)
     self.__check_meta_data(new_meta_data, new_datatype)
     
     op_path = FilesHelper().get_operation_meta_file_path(inserted_project.name, new_datatype.parent_operation.id)
     op_meta = XMLReader(op_path).read_metadata()
     self.assertEqual(op_meta['user_group'], 'new user group', 'UserGroup not updated!')