Code Example #1
File: tvb_export.py Project: nuuria8/tvb-root
    def copy_dt_to_export_folder(self, data, data_export_folder):
        data_path = h5.path_for_stored_index(data)
        file_destination = os.path.join(data_export_folder,
                                        os.path.basename(data_path))
        if not os.path.exists(file_destination):
            FilesHelper().copy_file(data_path, file_destination)
        H5File.remove_metadata_param(file_destination, 'parent_burst')

        return file_destination
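A hedged call sketch; "exporter" and "datatype_index" below are hypothetical stand-ins for an instance of the exporter class this method belongs to and a stored DataType index:

# Hypothetical objects: an exporter instance and a stored DataType index.
exported_path = exporter.copy_dt_to_export_folder(datatype_index, "/tmp/tvb_export")
# The copied H5 file has its 'parent_burst' metadata removed, so it can be exported standalone.
print(exported_path)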
Code Example #2
    def _edit_data(self, datatype, new_data, from_group=False):
        # type: (DataType, dict, bool) -> None
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with a new dictionary of data from the UI.
        """
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(
                OperationGroup,
                new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(OperationGroup,
                                                      new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name +
                                             "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)

        # 2. Update GenericAttributes on DataType index and in the associated H5 files:
        h5_path = h5.path_for_stored_index(datatype)
        with H5File.from_file(h5_path) as f:
            ga = f.load_generic_attributes()

        ga.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
        ga.state = new_data[DataTypeOverlayDetails.DATA_STATE]
        if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
            ga.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
        if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
            ga.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
        if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
            ga.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
        if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
            ga.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
        if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
            ga.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

        datatype.fill_from_generic_attributes(ga)
        datatype = dao.store_entity(datatype)
        # 3. Update MetaData in DT H5 as well.
        with H5File.from_file(h5_path) as f:
            f.store_generic_attributes(ga, False)
Code Example #3
    def _capture_operation_results(self, result):
        """
        Update h5 files with generic attributes
        """
        for file in os.listdir(self._get_output_path()):
            path = os.path.join(self._get_output_path(), file)
            if issubclass(H5File.h5_class_from_file(path), ViewModelH5):
                continue
            with H5File.from_file(path) as f:
                f.store_generic_attributes(self.generic_attributes)
        return "", 2
Code Example #4
def gather_all_references_of_view_model(gid, base_dir, ref_files):
    vm_path = determine_filepath(gid, base_dir)
    ref_files.append(vm_path)
    view_model_class = H5File.determine_type(vm_path)
    view_model = view_model_class()

    with ViewModelH5(vm_path, view_model) as vm_h5:
        references = vm_h5.gather_references()
        uuids = vm_h5.gather_references_by_uuid()

        for _, gid in references:
            if not gid:
                continue
            if isinstance(gid, (list, tuple)):
                for list_gid in gid:
                    gather_all_references_of_view_model(
                        list_gid, base_dir, ref_files)
            else:
                gather_all_references_of_view_model(gid, base_dir, ref_files)

        uuid_files = []
        for _, gid in uuids:
            if not gid:
                continue
            index = load_entity_by_gid(gid.hex)
            h5_file = h5_file_for_index(index)
            uuid_files.append(h5_file.path)
            gather_all_references_by_index(h5_file, uuid_files)
        ref_files.extend(uuid_files)
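A hedged usage sketch; simulator_gid and the operation folder path are hypothetical placeholders, and ref_files is filled in place with every ViewModel and DataType H5 path reached transitively:

ref_files = []
# Hypothetical GID of a stored ViewModel and the folder holding its H5 files.
gather_all_references_of_view_model(simulator_gid, "/projects/Default/42", ref_files)
print("collected %d referenced H5 files" % len(ref_files))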
Code Example #5
def load_view_model_from_file(filepath):
    # type: (str) -> ViewModel
    """
    Load a ViewModel object by reading the H5 file specified by filepath.
    """
    base_dir = os.path.dirname(filepath)
    view_model_class = H5File.determine_type(filepath)
    view_model = view_model_class()

    with ViewModelH5(filepath, view_model) as h5_file:
        h5_file.load_into(view_model)
        references = h5_file.gather_references()
        view_model.create_date = string2date(h5_file.create_date.load())
        view_model.generic_attributes = h5_file.load_generic_attributes()
        for trait_attr, gid in references:
            if not gid:
                continue
            if isinstance(gid, list):
                loaded_ref = []
                for idx, sub_gid in enumerate(gid):
                    ref = load_view_model(sub_gid, base_dir)
                    loaded_ref.append(ref)
            else:
                loaded_ref = load_view_model(gid, base_dir)
            setattr(view_model, trait_attr.field_name, loaded_ref)
    return view_model
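A minimal usage sketch (the file path is hypothetical; the referenced ViewModel H5 files are expected to live in the same directory, since base_dir is derived from the given path):

# Hypothetical path to a ViewModel H5 file inside an operation folder.
view_model = load_view_model_from_file("/projects/Default/42/SimulatorAdapterModel_1234.h5")
print(type(view_model).__name__, view_model.create_date)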
Code Example #6
File: _h5loader.py Project: JulieCB/tvb-root
    def load_complete_by_function(self, file_path, load_ht_function):
        # type: (str, callable) -> (HasTraits, GenericAttributes)
        with H5File.from_file(file_path) as f:
            try:
                datatype_cls = self.registry.get_datatype_for_h5file(f)
            except KeyError:
                datatype_cls = f.determine_datatype_from_file()
            datatype = datatype_cls()
            f.load_into(datatype)
            ga = f.load_generic_attributes()
            sub_dt_refs = f.gather_references(datatype_cls)

        for traited_attr, sub_gid in sub_dt_refs:
            if sub_gid is None:
                continue
            is_monitor = False
            if isinstance(sub_gid, list):
                sub_gid = sub_gid[0]
                is_monitor = True
            ref_ht = load_ht_function(sub_gid, traited_attr)
            if is_monitor:
                ref_ht = [ref_ht]
            setattr(datatype, traited_attr.field_name, ref_ht)

        return datatype, ga
Code Example #7
File: _h5loader.py Project: JulieCB/tvb-root
    def load(self, gid=None, fname=None):
        # type: (typing.Union[uuid.UUID, str], str) -> HasTraits
        """
        Load a HasTraits entity from a file. Either gid or fname should be given, otherwise an error is raised.

        :param gid: optional entity GUID to search for it under self.base_dir
        :param fname: optional file name to search for it under self.base_dir.
        :return: HasTraits instance read from the given location
        """
        if fname is None:
            if gid is None:
                raise ValueError("Neither gid nor filename is provided to load!")
            fname = self.find_file_by_gid(gid)

        sub_dt_refs = []

        with H5File.from_file(fname) as f:
            datatype_cls = self.registry.get_datatype_for_h5file(f)
            datatype = datatype_cls()
            f.load_into(datatype)

            if self.recursive:
                sub_dt_refs = f.gather_references()

        for traited_attr, sub_gid in sub_dt_refs:
            if sub_gid is not None:
                subdt = self.load(sub_gid)
                setattr(datatype, traited_attr.field_name, subdt)

        return datatype
Code Example #8
File: _h5loader.py Project: bvalean/tvb-root
    def load(self, gid=None, fname=None):
        # type: (typing.Union[uuid.UUID, str], str) -> ViewModel
        """
        Load a ViewModel object by reading the H5 file with the given GID, from the directory self.base_dir
        """
        if fname is None:
            if gid is None:
                raise ValueError(
                    "Neither gid nor filename is provided to load!")
            fname = self.find_file_by_gid(gid)
        else:
            fname = os.path.join(self.base_dir, fname)

        view_model_class = H5File.determine_type(fname)
        view_model = view_model_class()

        has_traits_h5 = self.registry.get_h5file_for_datatype(
            view_model.__class__)
        if has_traits_h5 != H5File:
            with has_traits_h5(fname) as file:
                self._load(file, view_model)
        else:
            with ViewModelH5(fname, view_model) as h5_file:
                self._load(h5_file, view_model)
        return view_model
Code Example #9
File: _h5loader.py Project: bvalean/tvb-root
    def gather_reference_files(self,
                               gid,
                               vm_ref_files,
                               dt_ref_files,
                               load_dts=None):
        vm_path = self.find_file_by_gid(gid)
        vm_ref_files.append(vm_path)
        view_model_class = H5File.determine_type(vm_path)
        view_model = view_model_class()

        with ViewModelH5(vm_path, view_model) as vm_h5:
            references = vm_h5.gather_references()

            for _, gid in references:
                if not gid:
                    continue
                if isinstance(gid, (list, tuple)):
                    for list_gid in gid:
                        self.gather_reference_files(list_gid, vm_ref_files,
                                                    dt_ref_files, load_dts)
                else:
                    self.gather_reference_files(gid, vm_ref_files,
                                                dt_ref_files, load_dts)
            if load_dts:
                load_dts(vm_h5, dt_ref_files)
Code Example #10
    def _store_imported_datatypes_in_db(self, project, all_datatypes):
        # type: (Project, dict) -> int
        sorted_dts = sorted(
            all_datatypes.items(),
            key=lambda dt_item: dt_item[1].create_date or datetime.now())

        count = 0
        for dt_path, datatype in sorted_dts:
            datatype_already_in_tvb = dao.get_datatype_by_gid(datatype.gid)
            if not datatype_already_in_tvb:
                self.store_datatype(datatype, dt_path)
                count += 1
            else:
                AlgorithmService.create_link([datatype_already_in_tvb.id],
                                             project.id)

            file_path = h5.h5_file_for_index(datatype).path
            h5_class = H5File.h5_class_from_file(file_path)
            reference_list = h5_class(file_path).gather_references()

            for _, reference_gid in reference_list:
                if not reference_gid:
                    continue

                ref_index = dao.get_datatype_by_gid(reference_gid.hex)
                if ref_index is None:
                    os.remove(file_path)
                    dao.remove_entity(datatype.__class__, datatype.id)
                    raise MissingReferenceException(
                        'Imported file depends on datatypes that do not exist. Please upload '
                        'those first!')

        return count
Code Example #11
File: tvb_linked_export.py Project: nuuria8/tvb-root
    def copy_dt_to_export_folder(self, data, data_export_folder):
        data_path = h5.path_for_stored_index(data)
        with H5File.from_file(data_path) as f:
            file_destination = os.path.join(data_export_folder,
                                            os.path.basename(data_path))
            if not os.path.exists(file_destination):
                FilesHelper().copy_file(data_path, file_destination)

            sub_dt_refs = f.gather_references()

            for _, ref_gid in sub_dt_refs:
                if ref_gid:
                    dt = load.load_entity_by_gid(ref_gid)
                    self.copy_dt_to_export_folder(dt, data_export_folder)

        H5File.remove_metadata_param(file_destination, 'parent_burst')
Code Example #12
File: _h5loader.py Project: JulieCB/tvb-root
    def load(self, gid=None, fname=None):
        # type: (typing.Union[uuid.UUID, str], str) -> ViewModel
        """
        Load a ViewModel object by reading the H5 file with the given GID, from the directory self.base_dir
        """
        if fname is None:
            if gid is None:
                raise ValueError("Neither gid nor filename is provided to load!")
            fname = self.find_file_by_gid(gid)
        else:
            fname = os.path.join(self.base_dir, fname)

        view_model_class = H5File.determine_type(fname)
        view_model = view_model_class()

        with ViewModelH5(fname, view_model) as h5_file:
            h5_file.load_into(view_model)
            references = h5_file.gather_references()
            view_model.create_date = string2date(h5_file.create_date.load())
            view_model.generic_attributes = h5_file.load_generic_attributes()
            for trait_attr, gid in references:
                if not gid:
                    continue
                if isinstance(gid, list):
                    loaded_ref = []
                    for idx, sub_gid in enumerate(gid):
                        ref = self.load(sub_gid)
                        loaded_ref.append(ref)
                else:
                    loaded_ref = self.load(gid)
                setattr(view_model, trait_attr.field_name, loaded_ref)
        return view_model
Code Example #13
File: project_service.py Project: nuuria8/tvb-root
        def set_visibility(dt):
            """ set visibility flag, persist in db and h5"""
            dt.visible = is_visible
            dt = dao.store_entity(dt)

            h5_path = h5.path_for_stored_index(dt)
            with H5File.from_file(h5_path) as f:
                f.visible.store(is_visible)
Code Example #14
File: export_manager.py Project: nuuria8/tvb-root
    def export_simulator_configuration(self, burst_id):
        burst = dao.get_burst_by_id(burst_id)
        if burst is None:
            raise InvalidExportDataException("Could not find burst with ID " +
                                             str(burst_id))

        op_folder = self.files_helper.get_project_folder(
            burst.project, str(burst.fk_simulation))
        tmp_export_folder = self._build_data_export_folder(burst)
        tmp_sim_folder = os.path.join(tmp_export_folder,
                                      self.EXPORTED_SIMULATION_NAME)

        if not os.path.exists(tmp_sim_folder):
            os.makedirs(tmp_sim_folder)

        all_view_model_paths, all_datatype_paths = h5.gather_references_of_view_model(
            burst.simulator_gid, op_folder)

        burst_path = h5.determine_filepath(burst.gid, op_folder)
        all_view_model_paths.append(burst_path)

        for vm_path in all_view_model_paths:
            dest = os.path.join(tmp_sim_folder, os.path.basename(vm_path))
            self.files_helper.copy_file(vm_path, dest)

        for dt_path in all_datatype_paths:
            dest = os.path.join(tmp_sim_folder,
                                self.EXPORTED_SIMULATION_DTS_DIR,
                                os.path.basename(dt_path))
            self.files_helper.copy_file(dt_path, dest)

        main_vm_path = h5.determine_filepath(burst.simulator_gid,
                                             tmp_sim_folder)
        H5File.remove_metadata_param(main_vm_path, 'history_gid')

        now = datetime.now()
        date_str = now.strftime("%Y-%m-%d_%H-%M")
        zip_file_name = "%s_%s.%s" % (date_str, str(burst_id),
                                      self.ZIP_FILE_EXTENSION)

        result_path = os.path.join(tmp_export_folder, zip_file_name)
        with TvbZip(result_path, "w") as zip_file:
            zip_file.write_folder(tmp_sim_folder)

        self.files_helper.remove_folder(tmp_sim_folder)
        return result_path
Code Example #15
    def load(self, source):
        # type: (str) -> HasTraits

        with H5File.from_file(source) as f:
            datatype_cls = self.registry.get_datatype_for_h5file(type(f))
            datatype = datatype_cls()
            f.load_into(datatype)
            return datatype
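A hedged call sketch, assuming "loader" is an instance of the loader class this method belongs to and the path points at a DataType H5 file known to its registry (both are hypothetical):

# Hypothetical loader instance and H5 path.
datatype = loader.load("/projects/Default/42/Connectivity_abcd.h5")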
Code Example #16
def update_written_by(folder):
    for root, _, files in os.walk(folder):
        for file_name in files:
            if file_name.endswith(".h5"):
                full_path = os.path.join(root, file_name)
                with H5File(full_path) as f:
                    prev_h5_path = f.written_by.load()
                    new_h5_path = prev_h5_path.replace("tvb.core.entities.file.datatypes", "tvb.adapters.datatypes.h5")
                    f.written_by.store(new_h5_path)
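A hedged usage sketch; the folder is a hypothetical project directory, and every .h5 file under it gets its written_by attribute rewritten to the tvb.adapters.datatypes.h5 module path:

# Hypothetical project folder containing H5 files written by an older TVB version.
update_written_by("/home/tvb_user/TVB/PROJECTS/DefaultProject")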
Code Example #17
    def __gather_datatypes_for_copy(self, data, dt_path_list):
        data_path = h5.path_for_stored_index(data)
        dt_path_list.append(data_path)
        with H5File.from_file(data_path) as f:
            sub_dt_refs = f.gather_references()

            for _, ref_gid in sub_dt_refs:
                if ref_gid:
                    dt = load.load_entity_by_gid(ref_gid)
                    self.__gather_datatypes_for_copy(dt, dt_path_list)
Code Example #18
    def _capture_operation_results(self, result):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct meta-data and IDs are set.
        """
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(
                datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(
                operation.fk_operation_group).id
        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]

        count_stored = 0
        group_type = None  # In case of a group, the first non-None type is sufficient to remember here
        for res in result:
            if res is None:
                continue
            res.subject = self.generic_attributes.subject
            res.state = self.generic_attributes.state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
            res.user_tag_1 = self.generic_attributes.user_tag_1
            res.user_tag_2 = self.generic_attributes.user_tag_2
            res.fk_datatype_group = data_type_group_id
            # Compute size on disk, in case file storage is used
            associated_file = h5.path_for_stored_index(res)
            if os.path.exists(associated_file):
                res.disk_size = self.file_handler.compute_size_on_disk(
                    associated_file)
                with H5File.from_file(associated_file) as f:
                    f.store_generic_attributes(self.generic_attributes)
            dao.store_entity(res)
            group_type = res.type
            count_stored += 1

        if count_stored > 0 and self._is_group_launch():
            # Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                operation.fk_operation_group)
            operation_group.fill_operationgroup_name(group_type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(
            self.operation_id) + ' has finished.', count_stored
Code Example #19
File: abcadapter.py Project: zuxfoucault/tvb-root
    def _capture_operation_results(self, result):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct meta-data and IDs are set.
        """
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(
                datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(
                operation.fk_operation_group).id

        count_stored = 0
        if result is None:
            return "", count_stored

        group_type = None  # In case of a group, the first non-None type is sufficient to remember here
        for res in result:
            if res is None:
                continue
            if not res.fixed_generic_attributes:
                res.fill_from_generic_attributes(self.generic_attributes)
            res.fk_from_operation = self.operation_id
            res.fk_datatype_group = data_type_group_id

            associated_file = h5.path_for_stored_index(res)
            if os.path.exists(associated_file):
                if not res.fixed_generic_attributes:
                    with H5File.from_file(associated_file) as f:
                        f.store_generic_attributes(self.generic_attributes)
                # Compute size on disk, in case file storage is used
                res.disk_size = self.storage_interface.compute_size_on_disk(
                    associated_file)

            dao.store_entity(res)
            res.after_store()
            group_type = res.type
            count_stored += 1

        if count_stored > 0 and self._is_group_launch():
            # Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                operation.fk_operation_group)
            operation_group.fill_operationgroup_name(group_type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(
            self.operation_id) + ' has finished.', count_stored
Code Example #20
File: configurations_h5.py Project: yop0/tvb-root
    def load_from_reference(self, gid):
        config_path = self.get_reference_path(gid)

        config_h5 = H5File.from_file(config_path)

        config_type = config_h5.type.load()
        package, cls_name = config_type.rsplit('.', 1)
        module = importlib.import_module(package)
        config_class = getattr(module, cls_name)

        config_instance = config_class()
        config_h5.load_into(config_instance)
        config_h5.close()

        return config_instance
Code Example #21
    def check_import_references(file_path, datatype):
        h5_class = H5File.h5_class_from_file(file_path)
        reference_list = h5_class(file_path).gather_references()

        for _, reference_gid in reference_list:
            if not reference_gid:
                continue

            ref_index = load.load_entity_by_gid(reference_gid)
            if ref_index is None:
                os.remove(file_path)
                dao.remove_entity(datatype.__class__, datatype.id)
                raise MissingReferenceException(
                    'Imported file depends on datatypes that do not exist. Please upload '
                    'those first!')
Code Example #22
File: _h5loader.py Project: yop0/tvb-root
    def load_complete_by_function(self, file_path, load_ht_function):
        # type: (str, callable) -> (HasTraits, GenericAttributes)
        with H5File.from_file(file_path) as f:
            datatype_cls = self.registry.get_datatype_for_h5file(type(f))
            datatype = datatype_cls()
            f.load_into(datatype)
            ga = f.load_generic_attributes()
            sub_dt_refs = f.gather_references()

        for traited_attr, sub_gid in sub_dt_refs:
            if sub_gid is None:
                continue
            ref_ht = load_ht_function(sub_gid, traited_attr)
            setattr(datatype, traited_attr.field_name, ref_ht)

        return datatype, ga
Code Example #23
    def load_with_references(self, file_path):
        # type: (str) -> (HasTraits, GenericAttributes)
        with H5File.from_file(file_path) as f:
            datatype_cls = self.registry.get_datatype_for_h5file(type(f))
            datatype = datatype_cls()
            f.load_into(datatype)
            ga = f.load_generic_attributes()
            sub_dt_refs = f.gather_references()

        for traited_attr, sub_gid in sub_dt_refs:
            if sub_gid is None:
                continue
            ref_idx = dao.get_datatype_by_gid(sub_gid.hex, load_lazy=False)
            ref_ht = self.load_from_index(ref_idx, traited_attr.field_type)
            setattr(datatype, traited_attr.field_name, ref_ht)

        return datatype, ga
Code Example #24
    def load_from_reference(self, gid):
        dir_loader = h5.DirLoader(os.path.dirname(self.path), h5.REGISTRY)
        config_filename = dir_loader.find_file_name(gid)
        config_path = os.path.join(dir_loader.base_dir, config_filename)

        config_h5 = H5File.from_file(config_path)

        config_type = config_h5.type.load()
        package, cls_name = config_type.rsplit('.', 1)
        module = importlib.import_module(package)
        config_class = getattr(module, cls_name)

        config_instance = config_class()
        config_h5.load_into(config_instance)
        config_h5.close()

        return config_instance
Code Example #25
    def load_datatype_from_file(self,
                                current_file,
                                op_id,
                                datatype_group=None,
                                current_project_id=None):
        # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex
        """
        Creates an instance of a datatype from storage / an H5 file.
        :returns: DatatypeIndex
        """
        self.logger.debug("Loading DataType from file: %s" % current_file)
        h5_class = H5File.h5_class_from_file(current_file)

        if h5_class is BurstConfigurationH5:
            if current_project_id is None:
                op_entity = dao.get_operationgroup_by_id(op_id)
                current_project_id = op_entity.fk_launched_in
            h5_file = BurstConfigurationH5(current_file)
            burst = BurstConfiguration(current_project_id)
            burst.fk_simulation = op_id
            h5_file.load_into(burst)
            result = burst
        else:
            datatype, generic_attributes = h5.load_with_links(current_file)
            index_class = h5.REGISTRY.get_index_for_datatype(
                datatype.__class__)
            datatype_index = index_class()
            datatype_index.fill_from_has_traits(datatype)
            datatype_index.fill_from_generic_attributes(generic_attributes)

            # Add all the required attributes
            if datatype_group:
                datatype_index.fk_datatype_group = datatype_group.id
                if len(datatype_group.subject) == 0:
                    datatype_group.subject = datatype_index.subject
                    dao.store_entity(datatype_group)
            datatype_index.fk_from_operation = op_id

            associated_file = h5.path_for_stored_index(datatype_index)
            if os.path.exists(associated_file):
                datatype_index.disk_size = FilesHelper.compute_size_on_disk(
                    associated_file)
            result = datatype_index

        return result
Code Example #26
    def _add_links_for_datatype_references(datatype, fk_to_project, link_to_delete, existing_dt_links):
        # If we found a datatype that has links, we need to link those as well to the linked project
        # so they can be also copied

        linked_datatype_paths = []
        h5_file = h5.h5_file_for_index(datatype)
        h5.gather_all_references_by_index(h5_file, linked_datatype_paths)

        for h5_path in linked_datatype_paths:
            if existing_dt_links is not None and h5_path in existing_dt_links:
                continue

            gid = H5File.get_metadata_param(h5_path, 'gid')
            dt_index = h5.load_entity_by_gid(uuid.UUID(gid))
            new_link = Links(dt_index.id, fk_to_project)
            dao.store_entity(new_link)

        dao.remove_entity(Links, link_to_delete)
        return linked_datatype_paths
Code Example #27
    def load(self, gid):
        # type: (typing.Union[uuid.UUID, str]) -> HasTraits
        fname = self.find_file_name(gid)

        sub_dt_refs = []

        with H5File.from_file(os.path.join(self.base_dir, fname)) as f:
            datatype_cls = self.registry.get_datatype_for_h5file(type(f))
            datatype = datatype_cls()
            f.load_into(datatype)

            if self.recursive:
                sub_dt_refs = f.gather_references()

        for traited_attr, sub_gid in sub_dt_refs:
            subdt = self.load(sub_gid)
            setattr(datatype, traited_attr.field_name, subdt)

        return datatype
Code Example #28
File: project_service.py Project: nuuria8/tvb-root
    def _edit_data(self, datatype, new_data, from_group=False):
        # type: (DataType, dict, bool) -> None
        """
        Private method, used for editing a meta-data XML file and a DataType row
        for a given custom DataType entity with a new dictionary of data from the UI.
        """
        # 1. First update Operation fields:
        #    Update group field if possible
        new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
        empty_group_value = (new_group_name is None or new_group_name == "")
        if from_group:
            if empty_group_value:
                raise StructureException("Empty group is not allowed!")

            group = dao.get_generic_entity(OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if group and len(group) > 0 and new_group_name != group[0].name:
                group = group[0]
                exists_group = dao.get_generic_entity(OperationGroup, new_group_name, 'name')
                if exists_group:
                    raise StructureException("Group '" + new_group_name + "' already exists.")
                group.name = new_group_name
                dao.store_entity(group)
        else:
            operation = dao.get_operation_by_id(datatype.fk_from_operation)
            operation.user_group = new_group_name
            dao.store_entity(operation)
            op_folder = self.structure_helper.get_project_folder(operation.project, str(operation.id))
            vm_gid = operation.view_model_gid
            view_model_file = h5.determine_filepath(vm_gid, op_folder)
            if view_model_file:
                view_model_class = H5File.determine_type(view_model_file)
                view_model = view_model_class()
                with ViewModelH5(view_model_file, view_model) as f:
                    ga = f.load_generic_attributes()
                    ga.operation_tag = new_group_name
                    f.store_generic_attributes(ga, False)
            else:
                self.logger.warning("Could not find ViewModel H5 file for op: {}".format(operation))

        # 2. Update GenericAttributes in the associated H5 files:
        h5_path = h5.path_for_stored_index(datatype)
        with H5File.from_file(h5_path) as f:
            ga = f.load_generic_attributes()

            ga.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
            ga.state = new_data[DataTypeOverlayDetails.DATA_STATE]
            ga.operation_tag = new_group_name
            if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
                ga.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
            if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
                ga.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
            if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
                ga.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
            if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
                ga.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
            if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
                ga.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]

            f.store_generic_attributes(ga, False)

        # 3. Update MetaData in DT Index DB as well.
        datatype.fill_from_generic_attributes(ga)
        dao.store_entity(datatype)
Code Example #29
def index_for_h5_file(source_path):
    # type: (str) -> typing.Type[DataType]
    """"""
    h5_class = H5File.h5_class_from_file(source_path)
    return REGISTRY.get_index_for_h5file(h5_class)
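A short hedged sketch (the path is hypothetical; the result is whichever index class REGISTRY maps to the H5 class of that file):

# Hypothetical H5 file produced by TVB.
index_cls = index_for_h5_file("/projects/Default/42/Connectivity_abcd.h5")
print(index_cls.__name__)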
Code Example #30
    def _retrieve_operations_in_order(self, project, import_path):
        # type: (Project, str) -> list[Operation2ImportData]
        retrieved_operations = []

        for root, _, files in os.walk(import_path):
            if OPERATION_XML in files:
                # Previous Operation format for uploading previous versions of projects
                operation_file_path = os.path.join(root, OPERATION_XML)
                operation, operation_xml_parameters = self.__build_operation_from_file(
                    project, operation_file_path)
                operation.import_file = operation_file_path
                self.logger.debug("Found operation in old XML format: " +
                                  str(operation))
                retrieved_operations.append(
                    Operation2ImportData(
                        operation,
                        root,
                        info_from_xml=operation_xml_parameters))

            else:
                # We strive for the new format with ViewModelH5
                main_view_model = None
                dt_paths = []
                all_view_model_files = []
                for file in files:
                    if file.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                        h5_file = os.path.join(root, file)
                        try:
                            h5_class = H5File.h5_class_from_file(h5_file)
                            if h5_class is ViewModelH5:
                                all_view_model_files.append(h5_file)
                                if not main_view_model:
                                    view_model = h5.load_view_model_from_file(
                                        h5_file)
                                    if type(view_model) in VIEW_MODEL2ADAPTER:
                                        main_view_model = view_model
                            else:
                                file_update_manager = FilesUpdateManager()
                                file_update_manager.upgrade_file(h5_file)
                                dt_paths.append(h5_file)
                        except Exception:
                            self.logger.warning(
                                "Unreadable H5 file will be ignored: %s" %
                                h5_file)

                if main_view_model is not None:
                    alg = VIEW_MODEL2ADAPTER[type(main_view_model)]
                    operation = Operation(main_view_model.gid.hex,
                                          project.fk_admin,
                                          project.id,
                                          alg.id,
                                          status=STATUS_FINISHED,
                                          user_group=main_view_model.generic_attributes.operation_tag,
                                          start_date=datetime.now(),
                                          completion_date=datetime.now())
                    operation.create_date = main_view_model.create_date
                    self.logger.debug(
                        "Found main ViewModel to create operation for it: " +
                        str(operation))

                    retrieved_operations.append(
                        Operation2ImportData(operation, root, main_view_model,
                                             dt_paths, all_view_model_files))

                elif len(dt_paths) > 0:
                    alg = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE,
                                                      TVB_IMPORTER_CLASS)
                    default_adapter = ABCAdapter.build_adapter(alg)
                    view_model = default_adapter.get_view_model_class()()
                    view_model.data_file = dt_paths[0]
                    vm_path = h5.store_view_model(view_model, root)
                    all_view_model_files.append(vm_path)
                    operation = Operation(view_model.gid.hex,
                                          project.fk_admin,
                                          project.id,
                                          alg.id,
                                          status=STATUS_FINISHED,
                                          start_date=datetime.now(),
                                          completion_date=datetime.now())
                    self.logger.debug(
                        "Found no ViewModel in folder, so we default to " +
                        str(operation))

                    retrieved_operations.append(
                        Operation2ImportData(operation, root, view_model,
                                             dt_paths, all_view_model_files,
                                             True))

        return sorted(retrieved_operations,
                      key=lambda op_data: op_data.order_field)