Example #1
def update(input_file, burst_match_dict=None):
    """
    :param input_file: the file that needs to be converted to a newer file storage version.
    """

    if not os.path.isfile(input_file):
        raise IncompatibleFileManagerException("The input path %s received for upgrading from 3 -> 4 is not a "
                                               "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    if DataTypeMetaData.KEY_CLASS_NAME not in root_metadata:
        raise IncompatibleFileManagerException("File %s received for upgrading 3 -> 4 is not valid, due to missing "
                                               "metadata: %s" % (input_file, DataTypeMetaData.KEY_CLASS_NAME))
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]
    # HDF5 string attributes may be read back as bytes; decode defensively,
    # since str(class_name, 'utf-8') would fail when the value is already str.
    if isinstance(class_name, bytes):
        class_name = class_name.decode('utf-8')
    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        LOGGER.info("Updating ProjectionSurface %s from %s" % (file_name, folder))

        projection_type = ProjectionsType.EEG.value
        if "SEEG" in class_name:
            projection_type = ProjectionsType.SEEG.value
        elif "MEG" in class_name:
            projection_type = ProjectionsType.MEG.value

        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" % (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))

        service = ImportService()
        try:
            operation_id = int(os.path.split(folder)[1])
            dt = service.load_datatype_from_file(os.path.join(folder, file_name), operation_id)
            dt_db = dao.get_datatype_by_gid(dt.gid)
        except ValueError:
            dt_db = None

        if dt_db is not None:
            # DT already in DB: refresh its own storage, making sure all fields are correctly populated
            dt_db.configure()
            dt_db.persist_full_metadata()
            try:
                # store back in DB, in case TVB 1.4 had wrongly imported flags
                dao.store_entity(dt_db)
            except Exception:
                LOGGER.exception("Could not update flags in DB, but we continue with the update!")

        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Provide default values, so a missing mapping does not block the whole project import
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
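
For context, an updater like this runs once per DataType file during a storage migration. Below is a minimal invocation sketch; the project folder path is hypothetical, and the one-subfolder-per-operation layout is assumed from the operation_id parsing above:

import os

# Hypothetical project folder; one subfolder per operation is assumed,
# matching the operation_id = int(os.path.split(folder)[1]) logic above.
project_folder = "/home/user/TVB/PROJECT_X"

for root, _, files in os.walk(project_folder):
    for name in files:
        if name.endswith(".h5"):
            # Upgrade each DataType file in place from storage version 3 to 4.
            update(os.path.join(root, name))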
Example #2
    def get_gid_attribute(self):
        """
        Used for obtaining the GID of the DataType whose data are stored in the current file.
        """
        if self.is_valid_hdf5_file():
            metadata = self.get_metadata()
            if GenericMetaData.KEY_GID in metadata:
                return metadata[GenericMetaData.KEY_GID]
            else:
                raise IncompatibleFileManagerException("Could not find the GID attribute in the "
                                                       "input file %s." % self.__storage_full_name)
        raise IncompatibleFileManagerException("File %s is not an HDF5 file. Are you using the correct "
                                               "manager for this file?" % (self.__storage_full_name,))
Example #3
    def get_file_data_version(self):
        """
        Checks the data version for the current file.
        """
        if not os.path.exists(self.__storage_full_name):
            raise MissingDataFileException("File storage data not found at path %s" % (self.__storage_full_name,))

        if self.is_valid_hdf5_file():
            metadata = self.get_metadata()
            if cfg.DATA_VERSION_ATTRIBUTE in metadata:
                return metadata[cfg.DATA_VERSION_ATTRIBUTE]
            else:
                raise IncompatibleFileManagerException("Could not find TVB specific data version attribute %s in file: "
                                                       "%s." % (cfg.DATA_VERSION_ATTRIBUTE, self.__storage_full_name))
        raise IncompatibleFileManagerException("File %s is not a hdf5 format file. Are you using the correct "
                                               "manager for this file?" % (self.__storage_full_name,))
Example #4
def update(input_file):
    """
    :param input_file: the file that needs to be converted to a newer file storage version.
    """

    raise IncompatibleFileManagerException(
        "Not yet implemented update for file %s" % input_file)
Example #5
    def store_data(self, dataset_name, data_list, where=ROOT_NODE_PATH):
        """
        This method stores provided data list into a data set in the H5 file.
        
        :param dataset_name: Name of the data set where to store data
        :param data_list: Data to be stored
        :param where: represents the path where to store our dataset (e.g. /data/info)
        """
        if dataset_name is None:
            dataset_name = ''
        if where is None:
            where = self.ROOT_NODE_PATH

        data_to_store = self._check_data(data_list)

        try:
            LOG.debug("Saving data into data set: %s" % dataset_name)
            # Open file in append mode ('a') to allow adding multiple data sets in the same file
            hdf5_file = self._open_h5_file()

            full_dataset_name = where + dataset_name
            if full_dataset_name not in hdf5_file:
                hdf5_file.create_dataset(full_dataset_name, data=data_to_store)

            elif hdf5_file[full_dataset_name].shape == data_to_store.shape:
                hdf5_file[full_dataset_name][...] = data_to_store[...]

            else:
                raise IncompatibleFileManagerException(
                    "Cannot update existing H5 DataSet %s with a different shape. "
                    "Try defining it as chunked!" % full_dataset_name)

        finally:
            # Always close the file, even if storing failed
            self.close_file()
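
A usage sketch for store_data, assuming ROOT_NODE_PATH is the root path "/" and that the manager creates the backing .h5 file on first write; the folder, file name, and array below are illustrative:

import numpy

manager = HDF5StorageManager("/tmp/demo", "demo.h5")
weights = numpy.random.rand(76, 76)

# The first call creates the data set; a second call with a same-shape array
# overwrites it in place, as allowed by the shape check above.
manager.store_data("weights", weights)
manager.store_data("weights", numpy.zeros((76, 76)))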