Example #1
    def launch(self, data_file):
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.

        :param data_file: an archive (ZIP / HDF5) containing the `DataType`

        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format, etc.)
        """
        if data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        if os.path.exists(data_file):
            if zipfile.is_zipfile(data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new temporary folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(data_file, tmp_folder)
                operations = ImportService().import_project_operations(
                    current_op.project, self.storage_path)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(data_file)

                folder, h5file = os.path.split(data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        service = ImportService()
                        datatype = service.load_datatype_from_file(
                            folder,
                            h5file,
                            self.operation_id,
                            final_storage=self.storage_path)
                        service.store_datatype(datatype)
                        self.nr_of_datatypes += 1
                    except Exception as excep:
                        # If the import operation failed, delete the file from disk.
                        if datatype is not None and os.path.exists(
                                datatype.get_storage_file_path()):
                            os.remove(datatype.get_storage_file_path())
                        self.log.exception(excep)
                        raise LaunchException(
                            "Invalid file received as input. Most probably the "
                            "meta-data is incomplete: " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  data_file)
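
A minimal, hypothetical driver for the adapter above; the importer class name and the surrounding setup are assumptions made for illustration, while launch(), operation_id, storage_path and nr_of_datatypes come from the example:

# Hypothetical usage sketch: TVBImporter and all paths are illustrative.
importer = TVBImporter()
importer.operation_id = 42                  # id of an existing Operation row
importer.storage_path = "/tmp/tvb_storage"  # project storage root
importer.launch("/tmp/uploads/project_export.zip")  # ZIP branch
importer.launch("/tmp/uploads/connectivity.h5")     # HDF5 branch
print(importer.nr_of_datatypes)             # datatypes stored by both calls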
Example #2
def update(input_file, burst_match_dict=None):
    """
    :param input_file: the file that needs to be converted to a newer file storage version.
    """

    if not os.path.isfile(input_file):
        raise IncompatibleFileManagerException("The input path %s received for upgrading from 3 -> 4 is not a "
                                               "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    if DataTypeMetaData.KEY_CLASS_NAME not in root_metadata:
        raise IncompatibleFileManagerException("File %s received for upgrading 3 -> 4 is not valid, due to missing "
                                               "metadata: %s" % (input_file, DataTypeMetaData.KEY_CLASS_NAME))
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]

    class_name = str(class_name, 'utf-8')
    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        LOGGER.info("Updating ProjectionSurface %s from %s" % (file_name, folder))

        projection_type = ProjectionsType.EEG.value
        if "SEEG" in class_name:
            projection_type = ProjectionsType.SEEG.value
        elif "MEG" in class_name:
            projection_type = ProjectionsType.MEG.value

        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" % (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))

        service = ImportService()
        try:
            operation_id = int(os.path.split(folder)[1])
            dt = service.load_datatype_from_file(os.path.join(folder, file_name), operation_id)
            dt_db = dao.get_datatype_by_gid(dt.gid)
        except ValueError:
            dt_db = None

        if dt_db is not None:
            # DT already in DB: update its own storage, making sure all fields are correctly populated
            dt_db.configure()
            dt_db.persist_full_metadata()
            try:
                # restore in DB, in case TVB 1.4 had wrongly imported flags
                dao.store_entity(dt_db)
            except Exception:
                LOGGER.exception("Could not update flags in DB, but we continue with the update!")

        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Provide default values, so that the whole project import does not fail
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
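
Note the str(class_name, 'utf-8') decode above: HDF5 string attributes can come back as bytes under Python 3, and the substring checks would otherwise raise a TypeError. A defensive sketch that accepts either type, assuming the attribute is UTF-8 encoded:

raw = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]
# h5py may return bytes for string attributes under Python 3;
# decode only when needed, so str values pass through untouched.
class_name = raw.decode("utf-8") if isinstance(raw, bytes) else raw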
Example #3
def _update_localconnectivity_metadata(folder, file_name):

    service = ImportService()
    operation_id = int(os.path.split(folder)[1])

    dt = service.load_datatype_from_file(folder, file_name, operation_id, move=False)
    info_dict = SparseMatrix.extract_sparse_matrix_metadata(dt.matrix)
    dt.set_metadata(info_dict, "", True, SparseMatrix.ROOT_PATH + "matrix")
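
A hypothetical call for the helper above; the folder layout (an operation-id directory containing the H5 file) follows from the int(os.path.split(folder)[1]) line, and the concrete paths are illustrative:

# The last path component must be a numeric operation id,
# otherwise int(...) raises a ValueError.
_update_localconnectivity_metadata("/tmp/tvb_storage/MyProject/42",
                                   "LocalConnectivity_abcd.h5")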
Example #4
    def launch(self, view_model):
        # type: (TVBImporterModel) -> list
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.
        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format, etc.)
        """
        if view_model.data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        service = ImportService()
        if os.path.exists(view_model.data_file):
            if zipfile.is_zipfile(view_model.data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new temporary folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(view_model.data_file, tmp_folder)
                operations = service.import_project_operations(
                    current_op.project, tmp_folder)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(view_model.data_file)

                folder, h5file = os.path.split(view_model.data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        datatype = service.load_datatype_from_file(
                            view_model.data_file, self.operation_id)
                        service.check_import_references(
                            view_model.data_file, datatype)
                        service.store_datatype(datatype, view_model.data_file)
                        self.nr_of_datatypes += 1
                    except ImportException as excep:
                        self.log.exception(excep)
                        if datatype is not None:
                            target_path = h5.path_for_stored_index(datatype)
                            if os.path.exists(target_path):
                                os.remove(target_path)
                        raise LaunchException(
                            "Invalid file received as input. " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        view_model.data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  view_model.data_file)
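
The except branch above (delete the half-written H5 when an import fails) can be factored into a small helper; a sketch reusing the names from the example, where only the helper's name is new:

def _remove_partial_h5(datatype):
    # Mirrors the cleanup in the except branch above: delete the
    # partially stored H5 file so a failed import leaves no residue.
    if datatype is not None:
        target_path = h5.path_for_stored_index(datatype)
        if os.path.exists(target_path):
            os.remove(target_path)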
Example #5
def update(input_file):
    """
    :param input_file: the file that needs to be converted to a newer file storage version.
    """

    if not os.path.isfile(input_file):
        raise IncompatibleFileManagerException("The input path %s received for upgrading from 3 -> 4 is not a "
                                               "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    if DataTypeMetaData.KEY_CLASS_NAME not in root_metadata:
        raise IncompatibleFileManagerException("File %s received for upgrading 3 -> 4 is not valid, due to missing "
                                               "metadata: %s" % (input_file, DataTypeMetaData.KEY_CLASS_NAME))
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]

    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        LOGGER.info("Updating ProjectionSurface %s from %s" % (file_name, folder))

        projection_type = projections.EEG_POLYMORPHIC_IDENTITY
        if "SEEG" in class_name:
            projection_type = projections.SEEG_POLYMORPHIC_IDENTITY
        elif "MEG" in class_name:
            projection_type = projections.MEG_POLYMORPHIC_IDENTITY

        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" % (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))

        service = ImportService()
        operation_id = int(os.path.split(folder)[1])
        dt = service.load_datatype_from_file(folder, file_name, operation_id, move=False)
        dt_db = dao.get_datatype_by_gid(dt.gid)

        if dt_db is not None:
            # DT already in DB: update its own storage, making sure all fields are correctly populated
            dt_db.configure()
            dt_db.persist_full_metadata()
            try:
                # restore in DB, in case TVB 1.4 had wrongly imported flags
                dao.store_entity(dt_db)
            except Exception:
                LOGGER.exception("Could not update flags in DB, but we continue with the update!")

        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Provide default values, so that the whole project import does not fail
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
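
The if/elif chain that picks a projection type tests the most specific marker first ("SEEG" would also match a plain "EEG" substring test); an equivalent data-driven sketch, where only the helper's name is hypothetical:

def _projection_type_for(class_name):
    # Check markers from most to least specific; EEG is the fallback,
    # exactly as in the chain above.
    for marker, ptype in (("SEEG", projections.SEEG_POLYMORPHIC_IDENTITY),
                          ("MEG", projections.MEG_POLYMORPHIC_IDENTITY)):
        if marker in class_name:
            return ptype
    return projections.EEG_POLYMORPHIC_IDENTITY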
Example #6
def _update_localconnectivity_metadata(folder, file_name):

    service = ImportService()
    operation_id = int(os.path.split(folder)[1])

    dt = service.load_datatype_from_file(folder,
                                         file_name,
                                         operation_id,
                                         move=False)
    info_dict = SparseMatrix.extract_sparse_matrix_metadata(dt.matrix)
    dt.set_metadata(info_dict, '', True, SparseMatrix.ROOT_PATH + 'matrix')
Example #7
def update_localconnectivity_metadata(folder, file_name):
    service = ImportService()
    operation_id = int(os.path.split(folder)[1])

    dt = service.load_datatype_from_file(os.path.join(folder, file_name), operation_id)
    info_dict = {"dtype": dt.matrix.dtype.str,
                 "format": dt.matrix.format,
                 "Shape": str(dt.matrix.shape),
                 "Maximum": dt.matrix.data.max(),
                 "Minimum": dt.matrix.data.min(),
                 "Mean": dt.matrix.mean()}
    dt.set_metadata(info_dict, '', True, '/matrix')
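
This variant hand-builds the dictionary that Examples #3 and #6 obtain from the datatype's own helper, and hard-codes the "/matrix" node they spell as SparseMatrix.ROOT_PATH + "matrix". Assuming extract_sparse_matrix_metadata produces these same keys, the last two lines are equivalent to:

# Equivalent to the hand-built dict above, per Examples #3 and #6.
info_dict = SparseMatrix.extract_sparse_matrix_metadata(dt.matrix)
dt.set_metadata(info_dict, '', True, SparseMatrix.ROOT_PATH + 'matrix')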
Example #8
    def launch(self, data_file):
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.

        :param data_file: an archive (ZIP / HDF5) containing the `DataType`

        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format, etc.)
        """
        if data_file is None:
            raise LaunchException("Please select file which contains data to import")

        if os.path.exists(data_file):
            if zipfile.is_zipfile(data_file):
                current_op = dao.get_operation_by_id(self.operation_id)

                # Create a new temporary folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                FilesHelper().unpack_zip(data_file, tmp_folder)
                operations = ImportService().import_project_operations(current_op.project, self.storage_path)
                shutil.rmtree(tmp_folder)
                self.nr_of_datatypes += len(operations)

            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(data_file)

                folder, h5file = os.path.split(data_file)
                manager = HDF5StorageManager(folder, h5file)
                if manager.is_valid_hdf5_file():
                    datatype = None
                    try:
                        service = ImportService()
                        datatype = service.load_datatype_from_file(folder, h5file, self.operation_id)
                        service.store_datatype(datatype)
                        self.nr_of_datatypes += 1
                    except Exception as excep:
                        # If the import operation failed, delete the file from disk.
                        if datatype is not None and os.path.exists(datatype.get_storage_file_path()):
                            os.remove(datatype.get_storage_file_path())
                        self.log.exception(excep)
                        raise LaunchException("Invalid file received as input. Most probably incomplete "
                                              "meta-data ...  " + str(excep))
                else:
                    raise LaunchException("Uploaded file: %s is neither in ZIP or HDF5 format" % data_file)

        else:
            raise LaunchException("File: %s to import does not exists." % data_file)
Example #9
def update(input_file):
    """
    :param input_file: the file that needs to be converted to a newer file storage version.
    """

    if not os.path.isfile(input_file):
        raise FileVersioningException("The input path %s received for upgrading from 3 -> 4 is not a "
                                      "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]

    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        LOGGER.info("Updating ProjectionSurface %s from %s" % (file_name, folder))

        projection_type = projections_data.EEG_POLYMORPHIC_IDENTITY
        if "SEEG" in class_name:
            projection_type = projections_data.SEEG_POLYMORPHIC_IDENTITY
        elif "MEG" in class_name:
            projection_type = projections_data.MEG_POLYMORPHIC_IDENTITY

        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" % (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))

        service = ImportService()
        operation_id = int(os.path.split(folder)[1])
        dt = service.load_datatype_from_file(folder, file_name, operation_id, move=False)
        dt_db = dao.get_datatype_by_gid(dt.gid)

        if dt_db is not None:
            # DT already in DB: update its own storage, making sure all fields are correctly populated
            dt_db.configure()
            dt_db.persist_full_metadata()
            # restore in DB, in case TVB 1.4 had wrongly imported flags
            dao.store_entity(dt_db)

        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Provide default values, so that the whole project import does not fail
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
Example #10
def read_h5(full_paths):

    # We need to load the DataType in the context of an operation, and a project
    all_projects = dao.get_all_projects()
    all_operations = dao.get_generic_entity(model.Operation, all_projects[0].id, "fk_launched_in")

    service = ImportService()
    results = []

    for full_path in full_paths:
        folder, h5file = os.path.split(full_path)
        # The actual read of H5:
        datatype = service.load_datatype_from_file(folder, h5file, all_operations[0].id, move=False)

        print "We've build DataType: [%s]" % datatype.__class__.__name__, datatype
        results.append(datatype)

    return results
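
A hypothetical invocation; the path is illustrative and assumes the database already holds at least one project with operations, as the dao calls above require:

# Each path is split into (folder, h5file) by the function above.
datatypes = read_h5(["/tmp/tvb_storage/MyProject/42/TimeSeriesRegion_abcd.h5"])
for dt in datatypes:
    print(dt.gid)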
Example #11
def read_h5(full_paths):
    # We need to load the DataType in the context of an operation, and a project
    all_projects = dao.get_all_projects()
    all_operations = dao.get_generic_entity(Operation, all_projects[0].id,
                                            "fk_launched_in")

    service = ImportService()
    results = []

    for full_path in full_paths:
        # The actual read of H5:
        datatype = service.load_datatype_from_file(full_path,
                                                   all_operations[0].id)

        print("We've build DataType: [%s]" % datatype.__class__.__name__,
              datatype)
        results.append(datatype)

    return results
Example #12
    def launch(self, view_model):
        # type: (TVBImporterModel) -> list
        """
        Execute import operations: unpack ZIP, build and store generic DataType objects.
        :raises LaunchException: when data_file is None, nonexistent, or invalid \
                    (e.g. incomplete meta-data, not in ZIP / HDF5 format, etc.)
        """
        if view_model.data_file is None:
            raise LaunchException(
                "Please select file which contains data to import")

        service = ImportService()
        if os.path.exists(view_model.data_file):
            current_op = dao.get_operation_by_id(self.operation_id)
            if zipfile.is_zipfile(view_model.data_file):
                # Create a new temporary folder in which to extract the data
                tmp_folder = os.path.join(self.storage_path, "tmp_import")
                self.storage_interface.unpack_zip(view_model.data_file,
                                                  tmp_folder)
                is_group = False
                current_op_id = current_op.id
                for file in os.listdir(tmp_folder):
                    # In case we import a DatatypeGroup, we want the default import flow
                    if os.path.isdir(os.path.join(tmp_folder, file)):
                        current_op_id = None
                        is_group = True
                        break
                try:
                    operations, all_dts, stored_dts_count = service.import_project_operations(
                        current_op.project, tmp_folder, is_group,
                        current_op_id)
                    self.nr_of_datatypes += stored_dts_count
                    if stored_dts_count == 0:
                        current_op.additional_info = 'All chosen datatypes already exist!'
                        dao.store_entity(current_op)
                    elif stored_dts_count < all_dts:
                        current_op.additional_info = 'Part of the chosen datatypes already exist!'
                        dao.store_entity(current_op)
                except ImportException as excep:
                    self.log.exception(excep)
                    current_op.additional_info = excep.message
                    current_op.status = STATUS_ERROR
                    raise LaunchException("Invalid file received as input. " +
                                          str(excep))
                finally:
                    shutil.rmtree(tmp_folder)
            else:
                # upgrade file if necessary
                file_update_manager = FilesUpdateManager()
                file_update_manager.upgrade_file(view_model.data_file)

                if self.storage_interface.get_storage_manager(
                        view_model.data_file).is_valid_tvb_file():
                    datatype = None
                    try:
                        datatype = service.load_datatype_from_file(
                            view_model.data_file, self.operation_id)
                        stored_new_dt = service.store_or_link_datatype(
                            datatype, view_model.data_file,
                            current_op.project.id)
                        if stored_new_dt == 0:
                            current_op.additional_info = 'The chosen datatype already exists!'
                            dao.store_entity(current_op)
                        self.nr_of_datatypes += stored_new_dt
                    except ImportException as excep:
                        self.log.exception(excep)
                        if datatype is not None:
                            target_path = h5.path_for_stored_index(datatype)
                            if os.path.exists(target_path):
                                os.remove(target_path)
                        raise LaunchException(
                            "Invalid file received as input. " + str(excep))
                else:
                    raise LaunchException(
                        "Uploaded file: %s is neither in ZIP or HDF5 format" %
                        view_model.data_file)

        else:
            raise LaunchException("File: %s to import does not exists." %
                                  view_model.data_file)
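
The directory scan that switches between the DatatypeGroup and single-operation import flows can be condensed; an equivalent sketch using only names from the example:

# True as soon as the extracted archive contains a sub-directory,
# which marks a DatatypeGroup import.
is_group = any(os.path.isdir(os.path.join(tmp_folder, entry))
               for entry in os.listdir(tmp_folder))
current_op_id = None if is_group else current_op.id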