Example #1
def update(input_file):
    """
    To avoid segmentation faults when updating a batch of files, start each
    conversion in a separate Python process.

    :param input_file: the file that needs to be converted to a newer file storage version.
        This should be a file that still uses the TVB 2.0 storage format.
    """
    if not os.path.isfile(input_file):
        raise FileVersioningException(
            "The input path %s received for upgrading from 2 -> 3 is not a "
            "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]

    if class_name == "LocalConnectivity":
        root_metadata[
            DataTypeMetaData.KEY_MODULE] = "tvb.datatypes.local_connectivity"
        storage_manager.set_metadata(root_metadata)
        _update_localconnectivity_metadata(folder, file_name)

    elif class_name == "RegionMapping":
        root_metadata[
            DataTypeMetaData.KEY_MODULE] = "tvb.datatypes.region_mapping"

    root_metadata[
        TvbProfile.current.version.
        DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
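
The docstring above recommends isolating each conversion in its own Python process. A minimal batch-driver sketch under that advice; the driver function and file paths are illustrative, and `update` is the function defined above:

from multiprocessing import Process

def update_batch(input_files):
    # One subprocess per file, so a segfault in one conversion cannot kill the batch.
    for path in input_files:
        worker = Process(target=update, args=(path,))
        worker.start()
        worker.join()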
Example #2
    def __init__(self, path):
        # type: (str) -> None
        self.path = path
        storage_path, file_name = os.path.split(path)
        self.storage_manager = HDF5StorageManager(storage_path, file_name)
        # It would be nice to have an "opened" state for the chunked API instead of close_file=False

        # common scalar headers
        self.gid = Uuid(HasTraits.gid, self)
        self.written_by = Scalar(Attr(str), self, name='written_by')
        self.create_date = Scalar(Attr(str), self, name='create_date')

        # Generic attributes descriptors
        self.generic_attributes = GenericAttributes()
        self.invalid = Scalar(Attr(bool), self, name='invalid')
        self.is_nan = Scalar(Attr(bool), self, name='is_nan')
        self.subject = Scalar(Attr(str), self, name='subject')
        self.state = Scalar(Attr(str), self, name='state')
        self.type = Scalar(Attr(str), self, name='type')
        self.user_tag_1 = Scalar(Attr(str), self, name='user_tag_1')
        self.user_tag_2 = Scalar(Attr(str), self, name='user_tag_2')
        self.user_tag_3 = Scalar(Attr(str), self, name='user_tag_3')
        self.user_tag_4 = Scalar(Attr(str), self, name='user_tag_4')
        self.user_tag_5 = Scalar(Attr(str), self, name='user_tag_5')
        self.visible = Scalar(Attr(bool), self, name='visible')
        self.metadata_cache = None

        # Default to False so the attribute always exists; flipped below for new files.
        self.is_new_file = False
        if not self.storage_manager.is_valid_hdf5_file():
            self.written_by.store(self.__class__.__module__ + '.' +
                                  self.__class__.__name__)
            self.is_new_file = True
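
A hedged usage sketch of the accessor pattern above. It assumes the constructor belongs to an instantiable H5File-style class and that the Scalar accessors expose store/load counterparts, as the written_by.store(...) call implies; the path is hypothetical:

h5_file = H5File('/tmp/example.h5')   # hypothetical path and constructor
h5_file.subject.store('subj-01')      # write one scalar header
print(h5_file.subject.load())         # load() is assumed as the counterpart of store()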
Example #3
    def launch(self, data_file):
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.

        :param data_file: an archive (ZIP / HDF5) containing the `DataType`

        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
        """
        if data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        if os.path.exists(data_file):
            if zipfile.is_zipfile(data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(data_file, tmp_folder)
                operations = ImportService().import_project_operations(
                    current_op.project, self.storage_path)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(data_file)

                folder, h5file = os.path.split(data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        service = ImportService()
                        datatype = service.load_datatype_from_file(
                            folder,
                            h5file,
                            self.operation_id,
                            final_storage=self.storage_path)
                        service.store_datatype(datatype)
                        self.nr_of_datatypes += 1
                    except Exception as excep:
                        # If the import operation failed, delete the file from disk.
                        if datatype is not None and os.path.exists(
                                datatype.get_storage_file_path()):
                            os.remove(datatype.get_storage_file_path())
                        self.log.exception(excep)
                        raise LaunchException(
                            "Invalid file received as input. Most probably incomplete "
                            "meta-data ...  " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  data_file)
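
The ZIP-versus-HDF5 branching above can be reproduced without TVB: zipfile.is_zipfile is stdlib, and HDF5 files begin with a fixed 8-byte signature. A self-contained sketch (it ignores HDF5 userblock offsets, where the signature may sit past byte 0):

import zipfile

HDF5_SIGNATURE = b'\x89HDF\r\n\x1a\n'  # leading bytes of an HDF5 superblock

def sniff_format(path):
    # Rough stand-in for the is_zipfile / is_valid_hdf5_file checks above.
    if zipfile.is_zipfile(path):
        return 'zip'
    with open(path, 'rb') as fh:
        if fh.read(8) == HDF5_SIGNATURE:
            return 'hdf5'
    return 'unknown'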
Example #4
def update(input_file, burst_match_dict=None):
    """
    :param input_file: the file that needs to be converted to a newer file storage version.
    """

    if not os.path.isfile(input_file):
        raise IncompatibleFileManagerException("The input path %s received for upgrading from 3 -> 4 is not a "
                                               "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    if DataTypeMetaData.KEY_CLASS_NAME not in root_metadata:
        raise IncompatibleFileManagerException("File %s received for upgrading 3 -> 4 is not valid, due to missing "
                                               "metadata: %s" % (input_file, DataTypeMetaData.KEY_CLASS_NAME))
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]

    class_name = str(class_name, 'utf-8')
    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        LOGGER.info("Updating ProjectionSurface %s from %s" % (file_name, folder))

        projection_type = ProjectionsType.EEG.value
        if "SEEG" in class_name:
            projection_type = ProjectionsType.SEEG.value
        elif "MEG" in class_name:
            projection_type = ProjectionsType.MEG.value

        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" % (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))

        service = ImportService()
        try:
            operation_id = int(os.path.split(folder)[1])
            dt = service.load_datatype_from_file(os.path.join(folder, file_name), operation_id)
            dt_db = dao.get_datatype_by_gid(dt.gid)
        except ValueError:
            dt_db = None

        if dt_db is not None:
            # DT already in DB; refresh its own storage, making sure all fields are correctly populated
            dt_db.configure()
            dt_db.persist_full_metadata()
            try:
                # restore in DB, in case TVB 1.4 had wrongly imported flags
                dao.store_entity(dt_db)
            except Exception:
                LOGGER.exception("Could not update flags in DB, but we continue with the update!")

        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Provide default values so the whole project can still be imported
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
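
The str(class_name, 'utf-8') call above exists because HDF5 string attributes often come back as bytes. A small defensive helper, sketched as an assumption (the snippet itself decodes unconditionally), accepts either type:

def as_text(value):
    # HDF5 attributes may arrive as bytes; plain str passes through unchanged.
    if isinstance(value, bytes):
        return value.decode('utf-8')
    return value

# e.g. class_name = as_text(root_metadata[DataTypeMetaData.KEY_CLASS_NAME])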
Example #5
    def _get_file_storage_mng(self):
        """
        Build the manager responsible for storing data into a file on disk.
        """
        if not hasattr(self, "_storage_manager") or self._storage_manager is None:
            file_name = self.get_storage_file_name()
            self._storage_manager = HDF5StorageManager(self.storage_path, file_name)
        return self._storage_manager
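
The helper above is the usual create-on-first-use cache: build the manager once, then return the cached instance on every later call. The same pattern in isolation, as a minimal sketch with a stand-in resource:

class LazyHolder:
    def __init__(self):
        self._resource = None

    def get_resource(self):
        # Construct the expensive object only on first access, then reuse it.
        if self._resource is None:
            self._resource = object()  # stands in for HDF5StorageManager(...)
        return self._resource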
Example #6
    def launch(self, view_model):
        # type: (TVBImporterModel) -> list
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.
        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
        """
        if view_model.data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        service = ImportService()
        if os.path.exists(view_model.data_file):
            if zipfile.is_zipfile(view_model.data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(view_model.data_file, tmp_folder)
                operations = service.import_project_operations(
                    current_op.project, tmp_folder)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(view_model.data_file)

                folder, h5file = os.path.split(view_model.data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        datatype = service.load_datatype_from_file(
                            view_model.data_file, self.operation_id)
                        service.check_import_references(
                            view_model.data_file, datatype)
                        service.store_datatype(datatype, view_model.data_file)
                        self.nr_of_datatypes += 1
                    except ImportException as excep:
                        self.log.exception(excep)
                        if datatype is not None:
                            target_path = h5.path_for_stored_index(datatype)
                            if os.path.exists(target_path):
                                os.remove(target_path)
                        raise LaunchException(
                            "Invalid file received as input. " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        view_model.data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  view_model.data_file)
Example #7
def remove_metadata_param(file_path,
                          param,
                          dataset_name='',
                          where=HDF5StorageManager.ROOT_NODE_PATH):
    base_dir, fname = os.path.split(file_path)
    storage_manager = HDF5StorageManager(base_dir, fname)
    if param in storage_manager.get_metadata(dataset_name=dataset_name,
                                             where=where):
        storage_manager.remove_metadata(param,
                                        dataset_name=dataset_name,
                                        where=where)
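
A hedged call-site sketch; the path is hypothetical, and 'invalid' is one of the scalar headers seen in Example #2:

remove_metadata_param('/data/project/42/TimeSeries.h5', 'invalid')  # illustrative path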
Example #8
def from_file(path):
    # type: (str) -> H5File
    base_dir, fname = os.path.split(path)
    storage_manager = HDF5StorageManager(base_dir, fname)
    meta = storage_manager.get_metadata()
    h5file_class_fqn = meta.get('written_by')
    if h5file_class_fqn is None:
        return H5File(path)
    package, cls_name = h5file_class_fqn.rsplit('.', 1)
    module = importlib.import_module(package)
    cls = getattr(module, cls_name)
    return cls(path)
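
The rsplit / import_module / getattr sequence above is a general recipe for resolving a fully-qualified class name. The same logic, self-contained and exercised against a stdlib class:

import importlib

def class_from_fqn(fqn):
    # e.g. 'collections.OrderedDict' -> <class 'collections.OrderedDict'>
    package, cls_name = fqn.rsplit('.', 1)
    module = importlib.import_module(package)
    return getattr(module, cls_name)

assert class_from_fqn('collections.OrderedDict').__name__ == 'OrderedDict'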
Example #9
def determine_type(path):
    # type: (str) -> typing.Type[HasTraits]
    base_dir, fname = os.path.split(path)
    storage_manager = HDF5StorageManager(base_dir, fname)
    meta = storage_manager.get_metadata()
    type_class_fqn = meta.get('type')
    if type_class_fqn is None:
        return HasTraits
    package, cls_name = type_class_fqn.rsplit('.', 1)
    module = importlib.import_module(package)
    cls = getattr(module, cls_name)
    return cls
Example #10
def update(input_file):
    """
    :param input_file: the file that needs to be converted to a newer file storage version.
    """

    if not os.path.isfile(input_file):
        raise FileVersioningException("The input path %s received for upgrading from 3 -> 4 is not a "
                                      "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]

    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        LOGGER.info("Updating ProjectionSurface %s from %s" % (file_name, folder))

        projection_type = projections_data.EEG_POLYMORPHIC_IDENTITY
        if "SEEG" in class_name:
            projection_type = projections_data.SEEG_POLYMORPHIC_IDENTITY
        elif "MEG" in class_name:
            projection_type = projections_data.MEG_POLYMORPHIC_IDENTITY

        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" % (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))

        service = ImportService()
        operation_id = int(os.path.split(folder)[1])
        dt = service.load_datatype_from_file(folder, file_name, operation_id, move=False)
        dt_db = dao.get_datatype_by_gid(dt.gid)

        if dt_db is not None:
            # DT already in DB; refresh its own storage, making sure all fields are correctly populated
            dt_db.configure()
            dt_db.persist_full_metadata()
            # restore in DB, in case TVB 1.4 had wrongly imported flags
            dao.store_entity(dt_db)

        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Provide default values so the whole project can still be imported
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
Example #11
    def load_datatype_from_file(self,
                                storage_folder,
                                file_name,
                                op_id,
                                datatype_group=None,
                                move=True):
        """
        Creates a datatype instance from a storage / H5 file.
        :returns: datatype
        """
        self.logger.debug("Loading datatType from file: %s" % file_name)
        storage_manager = HDF5StorageManager(storage_folder, file_name)
        meta_dictionary = storage_manager.get_metadata()
        meta_structure = DataTypeMetaData(meta_dictionary)

        # Now try to determine class and instantiate it
        class_name = meta_structure[DataTypeMetaData.KEY_CLASS_NAME]
        class_module = meta_structure[DataTypeMetaData.KEY_MODULE]
        datatype = __import__(class_module, globals(), locals(), [class_name])
        datatype = getattr(datatype, class_name)
        type_instance = manager_of_class(datatype).new_instance()

        # Now we fill data into instance
        type_instance.type = str(type_instance.__class__.__name__)
        type_instance.module = str(type_instance.__module__)

        # Fill instance with meta data
        type_instance.load_from_metadata(meta_dictionary)

        # Add all the required attributes
        if datatype_group is not None:
            type_instance.fk_datatype_group = datatype_group.id
        type_instance.set_operation_id(op_id)

        # Now move storage file into correct folder if necessary
        current_file = os.path.join(storage_folder, file_name)
        new_file = type_instance.get_storage_file_path()
        if new_file != current_file and move:
            shutil.move(current_file, new_file)

        return type_instance
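
One subtlety in the dynamic import above: giving __import__ a non-empty fromlist makes it return the leaf module rather than the top-level package, which is what makes the following getattr work. importlib.import_module yields the same module more readably; a stdlib-only check:

import importlib

mod_a = __import__('os.path', globals(), locals(), ['join'])  # non-empty fromlist
mod_b = importlib.import_module('os.path')
assert mod_a is mod_b and mod_a.join is mod_b.join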
Example #12
def _get_manager(file_path):
    """
    Returns a storage manager.
    """
    folder, file_name = os.path.split(file_path)
    return HDF5StorageManager(folder, file_name)
Example #13
def get_metadata_param(path, param):
    base_dir, fname = os.path.split(path)
    storage_manager = HDF5StorageManager(base_dir, fname)
    meta = storage_manager.get_metadata()
    return meta.get(param)
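
A hedged call-site sketch; the path is hypothetical, and the attribute name mirrors the version key the update functions above write (it assumes the TVB imports used throughout these examples):

data_version = get_metadata_param('/data/project/42/TimeSeries.h5',  # illustrative path
                                  TvbProfile.current.version.DATA_VERSION_ATTRIBUTE)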
Example #14
    def launch(self, view_model):
        # type: (TVBImporterModel) -> list
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.
        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format etc. )
        """
        if view_model.data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        service = ImportService()
        if os.path.exists(view_model.data_file):
            current_op = dao.get_operation_by_id(self.operation_id)
            if zipfile.is_zipfile(view_model.data_file):
                # Create a new TMP folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(view_model.data_file, tmp_folder)
                is_group = False
                current_op_id = current_op.id
                for file in os.listdir(tmp_folder):
                    # In case we import a DatatypeGroup, we want the default import flow
                    if os.path.isdir(os.path.join(tmp_folder, file)):
                        current_op_id = None
                        is_group = True
                        break
                try:
                    operations, all_dts, stored_dts_count = service.import_project_operations(
                        current_op.project, tmp_folder, is_group,
                        current_op_id)
                    self.nr_of_datatypes += stored_dts_count
                    if stored_dts_count == 0:
                        current_op.additional_info = 'All chosen datatypes already exist!'
                        dao.store_entity(current_op)
                    elif stored_dts_count < all_dts:
                        current_op.additional_info = 'Part of the chosen datatypes already exist!'
                        dao.store_entity(current_op)
                except ImportException as excep:
                    self.log.exception(excep)
                    current_op.additional_info = excep.message
                    current_op.status = STATUS_ERROR
                    raise LaunchException("Invalid file received as input. " +
                                          str(excep))
                finally:
                    shutil.rmtree(tmp_folder)
            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(view_model.data_file)

                folder, h5file = os.path.split(view_model.data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        datatype = service.load_datatype_from_file(
                            view_model.data_file, self.operation_id)
                        stored_new_dt = service.store_or_link_datatype(
                            datatype, view_model.data_file,
                            current_op.project.id)
                        if stored_new_dt == 0:
                            current_op.additional_info = 'The chosen datatype already exists!'
                            dao.store_entity(current_op)
                        self.nr_of_datatypes += stored_new_dt
                    except ImportException as excep:
                        self.log.exception(excep)
                        if datatype is not None:
                            target_path = h5.path_for_stored_index(datatype)
                            if os.path.exists(target_path):
                                os.remove(target_path)
                        raise LaunchException(
                            "Invalid file received as input. " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        view_model.data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  view_model.data_file)
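
The error handling shared by the importer variants above follows one pattern: attempt the import and, on failure, remove any half-written file before re-raising. Distilled into a self-contained sketch; the callable and the artifact path are placeholders:

import os

def import_with_cleanup(artifact_path, do_import):
    # Run the import; if it fails, drop the partial file so no orphan remains on disk.
    try:
        return do_import(artifact_path)
    except Exception:
        if os.path.exists(artifact_path):
            os.remove(artifact_path)
        raise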