Example #1
    def _store_imported_datatypes_in_db(self, project, all_datatypes,
                                        dt_burst_mappings, burst_ids_mapping):
        def by_time(dt):
            return dt.create_date or datetime.now()

        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}

        all_datatypes.sort(key=by_time)

        for datatype in all_datatypes:
            old_burst_id = dt_burst_mappings.get(datatype.gid)

            if old_burst_id is not None:
                datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

            datatype_allready_in_tvb = dao.get_datatype_by_gid(datatype.gid)

            if not datatype_allready_in_tvb:
                # Compute disk size. Similar to ABCAdapter._capture_operation_results.
                # No need to close the h5 as we have not written to it.
                associated_file = os.path.join(
                    datatype.storage_path, datatype.get_storage_file_name())
                datatype.disk_size = FilesHelper.compute_size_on_disk(
                    associated_file)

                self.store_datatype(datatype)
            else:
                FlowService.create_link([datatype_allready_in_tvb.id],
                                        project.id)
Example #2
    def _store_imported_datatypes_in_db(self, project, all_datatypes, dt_burst_mappings, burst_ids_mapping):
        def by_time(dt):
            return dt.create_date or datetime.now()

        if burst_ids_mapping is None:
            burst_ids_mapping = {}
        if dt_burst_mappings is None:
            dt_burst_mappings = {}

        all_datatypes.sort(key=by_time)

        for datatype in all_datatypes:
            old_burst_id = dt_burst_mappings.get(datatype.gid)

            if old_burst_id is not None:
                datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

            datatype_allready_in_tvb = dao.get_datatype_by_gid(datatype.gid)

            if not datatype_allready_in_tvb:
                # Compute disk size. Similar to ABCAdapter._capture_operation_results.
                # No need to close the h5 as we have not written to it.
                associated_file = os.path.join(datatype.storage_path, datatype.get_storage_file_name())
                datatype.disk_size = FilesHelper.compute_size_on_disk(associated_file)

                self.store_datatype(datatype)
            else:
                FlowService.create_link([datatype_allready_in_tvb.id], project.id)
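
Every example on this page ultimately calls FilesHelper.compute_size_on_disk before persisting a DataType. As a rough orientation, a minimal self-contained sketch of such a helper is shown below; the kilobyte unit and the zero fallback for missing files are assumptions made for illustration, not the actual TVB implementation.

import os


def compute_size_on_disk_sketch(file_path):
    """Approximate on-disk size of file_path, in kilobytes (illustrative sketch only)."""
    if not os.path.exists(file_path):
        # A missing file contributes nothing to the stored disk_size.
        return 0
    return int(os.path.getsize(file_path) / 1024)
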
Example #3
    def load_datatype_from_file(self, storage_folder, file_name, op_id, datatype_group=None,
                                move=True, final_storage=None):
        """
        Creates an instance of datatype from storage / H5 file 
        :returns: DatatypeIndex
        """
        self.logger.debug("Loading DataType from file: %s" % file_name)
        datatype, generic_attributes = h5.load_with_references(os.path.join(storage_folder, file_name))
        index_class = h5.REGISTRY.get_index_for_datatype(datatype.__class__)
        datatype_index = index_class()
        datatype_index.fill_from_has_traits(datatype)
        datatype_index.fill_from_generic_attributes(generic_attributes)

        # Add all the required attributes
        if datatype_group is not None:
            datatype_index.fk_datatype_group = datatype_group.id
        datatype_index.fk_from_operation = op_id

        associated_file = h5.path_for_stored_index(datatype_index)
        if os.path.exists(associated_file):
            datatype_index.disk_size = FilesHelper.compute_size_on_disk(associated_file)

        # Now move storage file into correct folder if necessary
        if move and final_storage is not None:
            current_file = os.path.join(storage_folder, file_name)
            h5_type = h5.REGISTRY.get_h5file_for_datatype(datatype.__class__)
            final_path = h5.path_for(final_storage, h5_type, datatype.gid)
            if final_path != current_file and move:
                shutil.move(current_file, final_path)

        return datatype_index
Example #4
    def _update_datatype_disk_size(self, file_path):
        """
        Computes and updates the disk_size attribute of the DataType for which the given file was created.
        """
        file_handler = FilesHelper()
        datatype_gid = self._get_manager(file_path).get_gid_attribute()
        datatype = dao.get_datatype_by_gid(datatype_gid)

        if datatype is not None:
            datatype.disk_size = file_handler.compute_size_on_disk(file_path)
            dao.store_entity(datatype)
Example #5
    def load_datatype_from_file(self,
                                current_file,
                                op_id,
                                datatype_group=None,
                                current_project_id=None):
        # type: (str, int, DataTypeGroup, int) -> HasTraitsIndex
        """
        Creates an instance of datatype from storage / H5 file 
        :returns: DatatypeIndex
        """
        self.logger.debug("Loading DataType from file: %s" % current_file)
        h5_class = H5File.h5_class_from_file(current_file)

        if h5_class is BurstConfigurationH5:
            if current_project_id is None:
                op_entity = dao.get_operationgroup_by_id(op_id)
                current_project_id = op_entity.fk_launched_in
            h5_file = BurstConfigurationH5(current_file)
            burst = BurstConfiguration(current_project_id)
            burst.fk_simulation = op_id
            h5_file.load_into(burst)
            result = burst
        else:
            datatype, generic_attributes = h5.load_with_links(current_file)
            index_class = h5.REGISTRY.get_index_for_datatype(
                datatype.__class__)
            datatype_index = index_class()
            datatype_index.fill_from_has_traits(datatype)
            datatype_index.fill_from_generic_attributes(generic_attributes)

            # Add all the required attributes
            if datatype_group:
                datatype_index.fk_datatype_group = datatype_group.id
                if len(datatype_group.subject) == 0:
                    datatype_group.subject = datatype_index.subject
                    dao.store_entity(datatype_group)
            datatype_index.fk_from_operation = op_id

            associated_file = h5.path_for_stored_index(datatype_index)
            if os.path.exists(associated_file):
                datatype_index.disk_size = FilesHelper.compute_size_on_disk(
                    associated_file)
            result = datatype_index

        return result
Example #6
class ABCAdapter(object):
    """
    Root Abstract class for all TVB Adapters. 
    """
    # todo this constants copy is not nice
    TYPE_SELECT = input_tree.TYPE_SELECT
    TYPE_MULTIPLE = input_tree.TYPE_MULTIPLE
    STATIC_ACCEPTED_TYPES = input_tree.STATIC_ACCEPTED_TYPES
    KEY_TYPE = input_tree.KEY_TYPE
    KEY_OPTIONS = input_tree.KEY_OPTIONS
    KEY_ATTRIBUTES = input_tree.KEY_ATTRIBUTES
    KEY_NAME = input_tree.KEY_NAME
    KEY_DESCRIPTION = input_tree.KEY_DESCRIPTION
    KEY_VALUE = input_tree.KEY_VALUE
    KEY_LABEL = input_tree.KEY_LABEL
    KEY_DEFAULT = input_tree.KEY_DEFAULT
    KEY_DATATYPE = input_tree.KEY_DATATYPE
    KEY_DTYPE = input_tree.KEY_DTYPE
    KEY_DISABLED = input_tree.KEY_DISABLED
    KEY_ALL = input_tree.KEY_ALL
    KEY_CONDITION = input_tree.KEY_CONDITION
    KEY_FILTERABLE = input_tree.KEY_FILTERABLE
    KEY_REQUIRED = input_tree.KEY_REQUIRED
    KEY_ID = input_tree.KEY_ID
    KEY_UI_HIDE = input_tree.KEY_UI_HIDE

    # TODO: move everything related to parameters PRE + POST into parameters_factory
    KEYWORD_PARAMS = input_tree.KEYWORD_PARAMS
    KEYWORD_SEPARATOR = input_tree.KEYWORD_SEPARATOR
    KEYWORD_OPTION = input_tree.KEYWORD_OPTION

    INTERFACE_ATTRIBUTES_ONLY = "attributes-only"
    INTERFACE_ATTRIBUTES = "attributes"

    # model.Algorithm instance that will be set for each adapter created by the build_adapter method
    stored_adapter = None

    def __init__(self):
        # It will be populated with keys from DataTypeMetaData
        self.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT
        }
        self.generic_attributes = GenericAttributes()
        self.generic_attributes.subject = DataTypeMetaData.DEFAULT_SUBJECT
        self.file_handler = FilesHelper()
        self.storage_path = '.'
        # Will be populated with the current running operation's identifier
        self.operation_id = None
        self.user_id = None
        self.log = get_logger(self.__class__.__module__)
        self.tree_manager = InputTreeManager()
        self.submitted_form = None

    @classmethod
    def get_group_name(cls):
        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "name"):
            return cls._ui_group.name
        return None

    @classmethod
    def get_group_description(cls):
        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "description"):
            return cls._ui_group.description
        return None

    @classmethod
    def get_ui_name(cls):
        if hasattr(cls, "_ui_name"):
            return cls._ui_name
        else:
            return cls.__name__

    @classmethod
    def get_ui_description(cls):
        if hasattr(cls, "_ui_description"):
            return cls._ui_description

    @classmethod
    def get_ui_subsection(cls):
        if hasattr(cls, "_ui_subsection"):
            return cls._ui_subsection

        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "subsection"):
            return cls._ui_group.subsection

    @staticmethod
    def can_be_active():
        """
        To be overridden where needed (e.g. Matlab dependent adapters).
        :return: By default True, and False when the current Adapter can not be executed in the current env
        for various reasons (e.g. no Matlab or Octave installed)
        """
        return True

    def get_input_tree(self):
        """
        Describes inputs and outputs of the launch method.
        """
        return None

    def submit_form(self, form):
        self.submitted_form = form

    # TODO separate usage of get_form_class (returning a class) and return of a submitted instance
    def get_form(self):
        if self.submitted_form is not None:
            return self.submitted_form
        return self.get_form_class()

    @abstractmethod
    def get_form_class(self):
        return None

    @abstractmethod
    def get_output(self):
        """
        Describes inputs and outputs of the launch method.
        """

    def configure(self, **kwargs):
        """
        To be implemented in each Adapter that requires any specific configurations
        before the actual launch.
        """

    @abstractmethod
    def get_required_memory_size(self, **kwargs):
        """
        Abstract method to be implemented in each adapter. Should return the required memory
        for launching the adapter.
        """

    @abstractmethod
    def get_required_disk_size(self, **kwargs):
        """
        Abstract method to be implemented in each adapter. Should return the required disk space
        for launching the adapter, in kilobytes.
        """

    def get_execution_time_approximation(self, **kwargs):
        """
        Method should approximate, based on the input arguments, the time it will take for the operation
        to finish (in seconds).
        """
        return -1

    @abstractmethod
    def launch(self):
        """
         To be implemented in each Adapter.
         Will contain the logic of the Adapter.
         Any returned DataType will be stored in DB, by the Framework.
        """

    def add_operation_additional_info(self, message):
        """
        Adds additional info on the operation to be displayed in the UI. Usually a warning message.
        """
        current_op = dao.get_operation_by_id(self.operation_id)
        current_op.additional_info = message
        dao.store_entity(current_op)

    def _prepare_generic_attributes(self, user_tag=None):

        self.generic_attributes.subject = str(
            self.meta_data.get(DataTypeMetaData.KEY_SUBJECT))
        self.generic_attributes.state = self.meta_data.get(
            DataTypeMetaData.KEY_STATE)

        perpetuated_identifier = self.generic_attributes.user_tag_1
        if DataTypeMetaData.KEY_TAG_1 in self.meta_data:
            perpetuated_identifier = self.meta_data.get(
                DataTypeMetaData.KEY_TAG_1)
        if not self.generic_attributes.user_tag_1:
            self.generic_attributes.user_tag_1 = user_tag if user_tag is not None else perpetuated_identifier
        else:
            self.generic_attributes.user_tag_2 = user_tag if user_tag is not None else perpetuated_identifier

    @nan_not_allowed()
    def _prelaunch(self,
                   operation,
                   uid=None,
                   available_disk_space=0,
                   **kwargs):
        """
        Method to wrap LAUNCH.
        Will prepare data, and store results on return. 
        """
        self.meta_data.update(json.loads(operation.meta_data))
        self.storage_path = self.file_handler.get_project_folder(
            operation.project, str(operation.id))
        self.operation_id = operation.id
        self.current_project_id = operation.project.id
        self.user_id = operation.fk_launched_by

        self.configure(**kwargs)

        # Compare the amount of memory the current algorithm states it needs,
        # with the average between the RAM available on the OS and the free memory at the current moment.
        # We do not consider only the free memory, because some OSs are freeing late and on-demand only.
        total_free_memory = psutil.virtual_memory().free + psutil.swap_memory(
        ).free
        total_existent_memory = psutil.virtual_memory(
        ).total + psutil.swap_memory().total
        memory_reference = (total_free_memory + total_existent_memory) / 2
        adapter_required_memory = self.get_required_memory_size(**kwargs)

        if adapter_required_memory > memory_reference:
            msg = "Machine does not have enough RAM memory for the operation (expected %.2g GB, but found %.2g GB)."
            raise NoMemoryAvailableException(
                msg %
                (adapter_required_memory / 2**30, memory_reference / 2**30))

        # Compare the expected size of the operation results with the HDD space currently available for the user
        # TVB defines a quota per user.
        required_disk_space = self.get_required_disk_size(**kwargs)
        if available_disk_space < 0:
            msg = "You have exceeded you HDD space quota by %.2f MB Stopping execution."
            raise NoMemoryAvailableException(msg %
                                             (-available_disk_space / 2**10))
        if available_disk_space < required_disk_space:
            msg = (
                "You only have %.2f GB of disk space available but the operation you "
                "launched might require %.2f Stopping execution...")
            raise NoMemoryAvailableException(
                msg %
                (available_disk_space / 2**20, required_disk_space / 2**20))

        operation.start_now()
        operation.estimated_disk_size = required_disk_space
        dao.store_entity(operation)

        self._prepare_generic_attributes(uid)
        result = self.launch(**kwargs)

        if not isinstance(result, (list, tuple)):
            result = [
                result,
            ]
        self.__check_integrity(result)
        return self._capture_operation_results(result)

    def _capture_operation_results(self, result):
        """
        After an operation was finished, make sure the results are stored
        in DB storage and that the correct meta-data and IDs are set.
        """
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(
                datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(
                operation.fk_operation_group).id
        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]

        count_stored = 0
        group_type = None  # In case of a group, the first non-None type is sufficient to remember here
        for res in result:
            if res is None:
                continue
            res.subject = self.generic_attributes.subject
            res.state = self.generic_attributes.state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
            res.user_tag_1 = self.generic_attributes.user_tag_1
            res.user_tag_2 = self.generic_attributes.user_tag_2
            res.fk_datatype_group = data_type_group_id
            # Compute size on disk, in case file storage is used
            associated_file = h5.path_for_stored_index(res)
            if os.path.exists(associated_file):
                res.disk_size = self.file_handler.compute_size_on_disk(
                    associated_file)
                with H5File.from_file(associated_file) as f:
                    f.store_generic_attributes(self.generic_attributes)
            dao.store_entity(res)
            group_type = res.type
            count_stored += 1

        if count_stored > 0 and self._is_group_launch():
            # Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                operation.fk_operation_group)
            operation_group.fill_operationgroup_name(group_type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(
            self.operation_id) + ' has finished.', count_stored

    def __check_integrity(self, result):
        """
        Check that the returned parameters for LAUNCH operation
        are of the type specified in the adapter's interface.
        """
        for result_entity in result:
            if result_entity is None:
                continue
            if not self.__is_data_in_supported_types(result_entity):
                msg = "Unexpected output DataType %s"
                raise InvalidParameterException(msg % type(result_entity))

    def __is_data_in_supported_types(self, data):

        if data is None:
            return True
        for supported_type in self.get_output():
            if isinstance(data, supported_type):
                return True
        # Data can't be mapped on any supported type !!
        return False

    def _is_group_launch(self):
        """
        Return True if this adapter is launched from a group of operations.
        """
        operation = dao.get_operation_by_id(self.operation_id)
        return operation.fk_operation_group is not None

    @staticmethod
    def load_entity_by_gid(data_gid):
        """
        Load a generic DataType, specified by GID.
        """
        return load_entity_by_gid(data_gid)

    @staticmethod
    def build_adapter_from_class(adapter_class):
        """
        Having a subclass of ABCAdapter, prepare an instance for launching an operation with it.
        """
        if not issubclass(adapter_class, ABCAdapter):
            raise IntrospectionException(
                "Invalid data type: It should extend adapters.ABCAdapter!")
        try:
            stored_adapter = dao.get_algorithm_by_module(
                adapter_class.__module__, adapter_class.__name__)

            adapter_instance = adapter_class()
            adapter_instance.stored_adapter = stored_adapter
            return adapter_instance
        except Exception as excep:
            LOGGER.exception(excep)
            raise IntrospectionException(str(excep))

    @staticmethod
    def build_adapter(stored_adapter):
        """
        Having a module and a class name, create an instance of ABCAdapter.
        """
        try:
            ad_module = importlib.import_module(stored_adapter.module)
            adapter_class = getattr(ad_module, stored_adapter.classname)
            adapter_instance = adapter_class()
            adapter_instance.stored_adapter = stored_adapter
            return adapter_instance

        except Exception:
            msg = "Could not load Adapter Instance for Stored row %s" % stored_adapter
            LOGGER.exception(msg)
            raise IntrospectionException(msg)

    # METHODS for PROCESSING PARAMETERS start here #############################

    def review_operation_inputs(self, parameters):
        """
        :returns: a list with the inputs from the parameters list that are instances of DataType,\
            and a dictionary with all parameters which are different from the declared defaults
        """
        flat_interface = self.flaten_input_interface()
        return self.tree_manager.review_operation_inputs(
            parameters, flat_interface)

    def prepare_ui_inputs(self, kwargs, validation_required=True):
        """
        Prepare the inputs received from a HTTP Post in a form that will be
        used by the Python adapter.
        """
        algorithm_inputs = self.get_input_tree()
        algorithm_inputs = InputTreeManager.prepare_param_names(
            algorithm_inputs)
        self.tree_manager.append_required_defaults(kwargs, algorithm_inputs)
        return self.convert_ui_inputs(kwargs,
                                      validation_required=validation_required)

    def convert_ui_inputs(self, kwargs, validation_required=True):
        """
        Convert HTTP POST parameters into Python parameters.
        """
        return self.tree_manager.convert_ui_inputs(
            self.flaten_input_interface(), kwargs, self.meta_data,
            validation_required)

    def noise_configurable_parameters(self):
        return [
            entry[self.KEY_NAME] for entry in self.flaten_input_interface()
            if 'configurableNoise' in entry
        ]

    def flaten_input_interface(self):
        # TODO: temporary condition to pass introspection on neoforms
        form = self.get_form_class()()
        if form:
            return [
                form._get_original_field_name(form_field)
                for form_field in form.fields
            ]
        return self.tree_manager.flatten(self.get_input_tree())
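
The class above is abstract; a concrete adapter mainly has to fill in get_form_class, get_output, the two resource estimators and launch. Below is a hypothetical, self-contained sketch of that contract, with a stand-in base class so it runs without TVB installed; ArraySumAdapter and its return values are made-up placeholders, not real TVB classes.

from abc import ABC, abstractmethod


class AdapterSketch(ABC):
    """Stand-in for ABCAdapter, reduced to the abstract surface shown above."""

    @abstractmethod
    def get_form_class(self): ...

    @abstractmethod
    def get_output(self): ...

    @abstractmethod
    def get_required_memory_size(self, **kwargs): ...

    @abstractmethod
    def get_required_disk_size(self, **kwargs): ...

    @abstractmethod
    def launch(self, **kwargs): ...


class ArraySumAdapter(AdapterSketch):
    """Hypothetical adapter that sums a list of numbers."""

    def get_form_class(self):
        return None  # a real adapter returns its form class here

    def get_output(self):
        return [float]  # a real adapter lists the DataType classes it can produce

    def get_required_memory_size(self, **kwargs):
        return 8 * len(kwargs.get("values", []))  # rough estimate, in bytes

    def get_required_disk_size(self, **kwargs):
        return 1  # in kilobytes, matching the docstring above

    def launch(self, **kwargs):
        return sum(kwargs.get("values", []))


print(ArraySumAdapter().launch(values=[1, 2, 3]))  # -> 6
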
Example #7
    def import_project_operations(self,
                                  project,
                                  import_path,
                                  is_group=False,
                                  importer_operation_id=None):
        """
        This method scans the provided folder and identifies all operations that need to be imported.
        """
        all_dts_count = 0
        all_stored_dts_count = 0
        imported_operations = []
        ordered_operations = self._retrieve_operations_in_order(
            project, import_path, importer_operation_id)

        for operation_data in ordered_operations:

            if operation_data.is_old_form:
                operation_entity, datatype_group = self.import_operation(
                    operation_data.operation)
                new_op_folder = self.files_helper.get_project_folder(
                    project, str(operation_entity.id))

                try:
                    operation_datatypes = self._load_datatypes_from_operation_folder(
                        operation_data.operation_folder, operation_entity,
                        datatype_group)
                    # Create and store view_model from operation
                    self.create_view_model(operation_entity, operation_data,
                                           new_op_folder)

                    self._store_imported_datatypes_in_db(
                        project, operation_datatypes)
                    imported_operations.append(operation_entity)
                except MissingReferenceException:
                    operation_entity.status = STATUS_ERROR
                    dao.store_entity(operation_entity)

            elif operation_data.main_view_model is not None:
                do_merge = False
                if importer_operation_id:
                    do_merge = True
                operation_entity = dao.store_entity(operation_data.operation,
                                                    merge=do_merge)
                dt_group = None
                op_group = dao.get_operationgroup_by_id(
                    operation_entity.fk_operation_group)
                if op_group:
                    dt_group = dao.get_datatypegroup_by_op_group_id(
                        op_group.id)
                    if not dt_group:
                        first_op = dao.get_operations_in_group(
                            op_group.id, only_first_operation=True)
                        dt_group = DataTypeGroup(
                            op_group,
                            operation_id=first_op.id,
                            state=DEFAULTDATASTATE_INTERMEDIATE)
                        dt_group = dao.store_entity(dt_group)
                # Store the DataTypes in db
                dts = {}
                all_dts_count += len(operation_data.dt_paths)
                for dt_path in operation_data.dt_paths:
                    dt = self.load_datatype_from_file(dt_path,
                                                      operation_entity.id,
                                                      dt_group, project.id)
                    if isinstance(dt, BurstConfiguration):
                        if op_group:
                            dt.fk_operation_group = op_group.id
                        all_stored_dts_count += self._store_or_link_burst_config(
                            dt, dt_path, project.id)
                    else:
                        dts[dt_path] = dt
                        if op_group:
                            op_group.fill_operationgroup_name(dt.type)
                            dao.store_entity(op_group)
                try:
                    stored_dts_count = self._store_imported_datatypes_in_db(
                        project, dts)
                    all_stored_dts_count += stored_dts_count

                    if operation_data.main_view_model.is_metric_operation:
                        self._update_burst_metric(operation_entity)

                    # TODO: TVB-2849 to review these flags and simplify condition
                    if stored_dts_count > 0 or (
                            not operation_data.is_self_generated and
                            not is_group) or importer_operation_id is not None:
                        imported_operations.append(operation_entity)
                        new_op_folder = self.files_helper.get_project_folder(
                            project, str(operation_entity.id))
                        view_model_disk_size = 0
                        for h5_file in operation_data.all_view_model_files:
                            view_model_disk_size += FilesHelper.compute_size_on_disk(
                                h5_file)
                            shutil.move(h5_file, new_op_folder)
                        operation_entity.view_model_disk_size = view_model_disk_size
                        dao.store_entity(operation_entity)
                    else:
                        # In case all Dts under the current operation were Links and the ViewModel is dummy,
                        # don't keep the Operation empty in DB
                        dao.remove_entity(Operation, operation_entity.id)
                        self.files_helper.remove_operation_data(
                            project.name, operation_entity.id)
                except MissingReferenceException as excep:
                    dao.remove_entity(Operation, operation_entity.id)
                    self.files_helper.remove_operation_data(
                        project.name, operation_entity.id)
                    raise excep
            else:
                self.logger.warning(
                    "Folder %s will be ignored, as we could not find a serialized "
                    "operation or DTs inside!" %
                    operation_data.operation_folder)

        self._update_dt_groups(project.id)
        self._update_burst_configurations(project.id)
        return imported_operations, all_dts_count, all_stored_dts_count
Example #8
class FilesUpdateManager(UpdateManager):
    """
    Manager for updating H5 files version, when code gets changed.
    """

    UPDATE_SCRIPTS_SUFFIX = "_update_files"
    PROJECTS_PAGE_SIZE = 20
    DATA_TYPES_PAGE_SIZE = 500
    STATUS = True
    MESSAGE = "Done"

    def __init__(self):
        super(FilesUpdateManager, self).__init__(
            file_update_scripts,
            TvbProfile.current.version.DATA_CHECKED_TO_VERSION,
            TvbProfile.current.version.DATA_VERSION)
        self.files_helper = FilesHelper()

    def get_file_data_version(self, file_path):
        """
        Return the data version for the given file.
        
        :param file_path: the path on disk to the file for which you need the TVB data version
        :returns: a number representing the data version for which the input file was written
        """
        manager = self._get_manager(file_path)
        return manager.get_file_data_version()

    def is_file_up_to_date(self, file_path):
        """
        Returns True only if the data version of the file is equal with the
        data version specified into the TVB configuration file.
        """
        try:
            file_version = self.get_file_data_version(file_path)
        except MissingDataFileException as ex:
            self.log.exception(ex)
            return False
        except FileStructureException as ex:
            self.log.exception(ex)
            return False

        if file_version == TvbProfile.current.version.DATA_VERSION:
            return True
        return False

    def upgrade_file(self, input_file_name, datatype=None):
        """
        Upgrades the given file to the latest data version. The file will be upgraded
        sequentially, up until the current version from tvb.basic.config.settings.VersionSettings.DB_STRUCTURE_VERSION
        
        :param input_file_name: the path to the file which needs to be upgraded
        :return: True, when an update was needed and running it was successful;
        False, when the file is already up to date.

        """
        if self.is_file_up_to_date(input_file_name):
            # Avoid running the DB update of size, when H5 is not being changed, to speed-up
            return False

        file_version = self.get_file_data_version(input_file_name)
        self.log.info("Updating from version %s , file: %s " %
                      (file_version, input_file_name))
        for script_name in self.get_update_scripts(file_version):
            self.run_update_script(script_name, input_file=input_file_name)

        if datatype:
            # Compute and update the disk_size attribute of the DataType in DB:
            datatype.disk_size = self.files_helper.compute_size_on_disk(
                input_file_name)
            dao.store_entity(datatype)

        return True

    def __upgrade_datatype_list(self, datatypes):
        """
        Upgrade a list of DataTypes to the current version.
        
        :param datatypes: The list of DataTypes that should be upgraded.

        :returns: (nr_of_dts_upgraded_fine, nr_of_dts_upgraded_fault, no_of_dts_ignored) a three-tuple of integers
            representing the number of DataTypes for which the upgrade worked fine, the number of DataTypes for
            which some kind of fault occurred, and the number of DataTypes that were ignored
        """
        nr_of_dts_upgraded_fine = 0
        nr_of_dts_upgraded_fault = 0
        no_of_dts_ignored = 0

        for datatype in datatypes:
            try:
                from tvb.basic.traits.types_mapped import MappedType

                specific_datatype = dao.get_datatype_by_gid(datatype.gid,
                                                            load_lazy=False)

                if specific_datatype is None:
                    datatype.invalid = True
                    dao.store_entity(datatype)
                    nr_of_dts_upgraded_fault += 1
                elif isinstance(specific_datatype, MappedType):
                    update_was_needed = self.upgrade_file(
                        specific_datatype.get_storage_file_path(),
                        specific_datatype)
                    if update_was_needed:
                        nr_of_dts_upgraded_fine += 1
                    else:
                        no_of_dts_ignored += 1
                else:
                    # Ignore DataTypeGroups
                    self.log.debug("We will ignore, due to type: " +
                                   str(specific_datatype))
                    no_of_dts_ignored += 1

            except Exception as ex:
                # The file/class is missing for some reason. Just mark the DataType as invalid.
                datatype.invalid = True
                dao.store_entity(datatype)
                nr_of_dts_upgraded_fault += 1
                self.log.exception(ex)

        return nr_of_dts_upgraded_fine, nr_of_dts_upgraded_fault, no_of_dts_ignored

    def run_all_updates(self):
        """
        Upgrades all the data types from TVB storage to the latest data version.
        
        :returns: a two entry tuple (status, message) where status is a boolean that is True in case
            the upgrade was successful for all DataTypes and False otherwise, and message is a status
            update message.
        """
        if TvbProfile.current.version.DATA_CHECKED_TO_VERSION < TvbProfile.current.version.DATA_VERSION:
            total_count = dao.count_all_datatypes()

            self.log.info(
                "Starting to run H5 file updates from version %d to %d, for %d datatypes"
                % (TvbProfile.current.version.DATA_CHECKED_TO_VERSION,
                   TvbProfile.current.version.DATA_VERSION, total_count))

            # Keep track of how many DataTypes were properly updated and how many
            # were marked as invalid due to missing files or invalid manager.
            no_ok = 0
            no_error = 0
            no_ignored = 0
            start_time = datetime.now()

            # Read DataTypes in pages to limit the memory consumption
            for current_idx in range(0, total_count,
                                     self.DATA_TYPES_PAGE_SIZE):
                datatypes_for_page = dao.get_all_datatypes(
                    current_idx, self.DATA_TYPES_PAGE_SIZE)
                count_ok, count_error, count_ignored = self.__upgrade_datatype_list(
                    datatypes_for_page)
                no_ok += count_ok
                no_error += count_error
                no_ignored += count_ignored

                self.log.info(
                    "Updated H5 files so far: %d [fine:%d, error:%d, ignored:%d of total:%d, in: %s min]"
                    % (current_idx + len(datatypes_for_page), no_ok, no_error,
                       no_ignored, total_count,
                       int((datetime.now() - start_time).seconds / 60)))

            # Now update the configuration file since update was done
            config_file_update_dict = {
                stored.KEY_LAST_CHECKED_FILE_VERSION:
                TvbProfile.current.version.DATA_VERSION
            }

            if no_error == 0:
                # Everything went fine
                config_file_update_dict[
                    stored.KEY_FILE_STORAGE_UPDATE_STATUS] = FILE_STORAGE_VALID
                FilesUpdateManager.STATUS = True
                FilesUpdateManager.MESSAGE = (
                    "File upgrade finished successfully for all %s entries. "
                    "Thank you for your patience!" % total_count)
                self.log.info(FilesUpdateManager.MESSAGE)
            else:
                # Something went wrong
                config_file_update_dict[
                    stored.
                    KEY_FILE_STORAGE_UPDATE_STATUS] = FILE_STORAGE_INVALID
                FilesUpdateManager.STATUS = False
                FilesUpdateManager.MESSAGE = (
                    "Out of %s stored DataTypes, %s were upgraded successfully, but %s had "
                    "faults and were marked invalid" %
                    (total_count, no_ok, no_error))
                self.log.warning(FilesUpdateManager.MESSAGE)

            TvbProfile.current.version.DATA_CHECKED_TO_VERSION = TvbProfile.current.version.DATA_VERSION
            TvbProfile.current.manager.add_entries_to_config_file(
                config_file_update_dict)

    @staticmethod
    def _get_manager(file_path):
        """
        Returns a storage manager.
        """
        folder, file_name = os.path.split(file_path)
        return HDF5StorageManager(folder, file_name)
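
run_all_updates above walks the DataTypes in pages of DATA_TYPES_PAGE_SIZE so that a large project never has to be loaded into memory at once. The paging pattern, reduced to a self-contained sketch with a plain list standing in for the dao queries:

def iterate_in_pages(items, page_size=500):
    """Yield consecutive slices of 'items'; stands in for dao.get_all_datatypes paging."""
    for start in range(0, len(items), page_size):
        yield items[start:start + page_size]


processed = 0
for page in iterate_in_pages(list(range(1234)), page_size=500):
    processed += len(page)  # a real run would hand the page to __upgrade_datatype_list
print(processed)  # -> 1234
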
Example #9
class ABCAdapter(object):
    """
    Root Abstract class for all TVB Adapters. 
    """
    # model.Algorithm instance that will be set for each adapter class created by the build_adapter method
    stored_adapter = None
    launch_mode = AdapterLaunchModeEnum.ASYNC_DIFF_MEM

    def __init__(self):
        self.generic_attributes = GenericAttributes()
        self.generic_attributes.subject = DataTypeMetaData.DEFAULT_SUBJECT
        self.file_handler = FilesHelper()
        self.storage_path = '.'
        # Will be populated with the current running operation's identifier
        self.operation_id = None
        self.user_id = None
        self.submitted_form = None
        self.log = get_logger(self.__class__.__module__)

    @classmethod
    def get_group_name(cls):
        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "name"):
            return cls._ui_group.name
        return None

    @classmethod
    def get_group_description(cls):
        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "description"):
            return cls._ui_group.description
        return None

    @classmethod
    def get_ui_name(cls):
        if hasattr(cls, "_ui_name"):
            return cls._ui_name
        else:
            return cls.__name__

    @classmethod
    def get_ui_description(cls):
        if hasattr(cls, "_ui_description"):
            return cls._ui_description

    @classmethod
    def get_ui_subsection(cls):
        if hasattr(cls, "_ui_subsection"):
            return cls._ui_subsection

        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "subsection"):
            return cls._ui_group.subsection

    @staticmethod
    def can_be_active():
        """
        To be overridden where needed (e.g. Matlab dependent adapters).
        :return: By default True, and False when the current Adapter can not be executed in the current env
        for various reasons (e.g. no Matlab or Octave installed)
        """
        return True

    def submit_form(self, form):
        self.submitted_form = form

    # TODO separate usage of get_form_class (returning a class) and return of a submitted instance
    def get_form(self):
        if self.submitted_form is not None:
            return self.submitted_form
        return self.get_form_class()

    @abstractmethod
    def get_form_class(self):
        return None

    def get_adapter_fragments(self, view_model):
        """
        The result will be used for introspection, and for checking which operation input
        params were changed from the defaults, so they can be shown in the web GUI.
        :return: a list of ABCAdapterForm classes, in case the current Adapter GUI
        will be composed of multiple sub-forms.
        """
        return {}

    def get_view_model_class(self):
        return self.get_form_class().get_view_model()

    @abstractmethod
    def get_output(self):
        """
        Describes inputs and outputs of the launch method.
        """

    def configure(self, view_model):
        """
        To be implemented in each Adapter that requires any specific configurations
        before the actual launch.
        """

    @abstractmethod
    def get_required_memory_size(self, view_model):
        """
        Abstract method to be implemented in each adapter. Should return the required memory
        for launching the adapter.
        """

    @abstractmethod
    def get_required_disk_size(self, view_model):
        """
        Abstract method to be implemented in each adapter. Should return the required disk space
        for launching the adapter, in kilobytes.
        """

    def get_execution_time_approximation(self, view_model):
        """
        Method should approximate, based on the input arguments, the time it will take for the operation
        to finish (in seconds).
        """
        return -1

    @abstractmethod
    def launch(self, view_model):
        """
         To be implemented in each Adapter.
         Will contain the logic of the Adapter.
         Takes a ViewModel with data, dependency direction is: Adapter -> Form -> ViewModel
         Any returned DataType will be stored in DB, by the Framework.
        :param view_model: the data model corresponding to the current adapter
        """

    def add_operation_additional_info(self, message):
        """
        Adds additional info on the operation to be displayed in the UI. Usually a warning message.
        """
        current_op = dao.get_operation_by_id(self.operation_id)
        current_op.additional_info = message
        dao.store_entity(current_op)

    def extract_operation_data(self, operation):
        operation = dao.get_operation_by_id(operation.id)
        project = dao.get_project_by_id(operation.fk_launched_in)
        self.storage_path = self.file_handler.get_project_folder(project, str(operation.id))
        self.operation_id = operation.id
        self.current_project_id = operation.project.id
        self.user_id = operation.fk_launched_by

    def _ensure_enough_resources(self, available_disk_space, view_model):
        # Compare the amount of memory the current algorithm states it needs,
        # with the average between the RAM available on the OS and the free memory at the current moment.
        # We do not consider only the free memory, because some OSs are freeing late and on-demand only.
        total_free_memory = psutil.virtual_memory().free + psutil.swap_memory().free
        total_existent_memory = psutil.virtual_memory().total + psutil.swap_memory().total
        memory_reference = (total_free_memory + total_existent_memory) / 2
        adapter_required_memory = self.get_required_memory_size(view_model)

        if adapter_required_memory > memory_reference:
            msg = "Machine does not have enough RAM memory for the operation (expected %.2g GB, but found %.2g GB)."
            raise NoMemoryAvailableException(msg % (adapter_required_memory / 2 ** 30, memory_reference / 2 ** 30))

        # Compare the expected size of the operation results with the HDD space currently available for the user
        # TVB defines a quota per user.
        required_disk_space = self.get_required_disk_size(view_model)
        if available_disk_space < 0:
            msg = "You have exceeded you HDD space quota by %.2f MB Stopping execution."
            raise NoMemoryAvailableException(msg % (- available_disk_space / 2 ** 10))
        if available_disk_space < required_disk_space:
            msg = ("You only have %.2f GB of disk space available but the operation you "
                   "launched might require %.2f Stopping execution...")
            raise NoMemoryAvailableException(msg % (available_disk_space / 2 ** 20, required_disk_space / 2 ** 20))
        return required_disk_space

    def _update_operation_entity(self, operation, required_disk_space):
        operation.start_now()
        operation.estimated_disk_size = required_disk_space
        dao.store_entity(operation)

    @nan_not_allowed()
    def _prelaunch(self, operation, view_model, uid=None, available_disk_space=0):
        """
        Method to wrap LAUNCH.
        Will prepare data, and store results on return.
        """
        self.extract_operation_data(operation)
        self.generic_attributes.fill_from(view_model.generic_attributes)
        self.configure(view_model)
        required_disk_size = self._ensure_enough_resources(available_disk_space, view_model)
        self._update_operation_entity(operation, required_disk_size)

        if not self.generic_attributes.user_tag_1:
            self.generic_attributes.user_tag_1 = uid
        else:
            self.generic_attributes.user_tag_2 = uid

        result = self.launch(view_model)

        if not isinstance(result, (list, tuple)):
            result = [result, ]
        self.__check_integrity(result)
        return self._capture_operation_results(result)

    def _capture_operation_results(self, result):
        """
        After an operation was finished, make sure the results are stored
        in DB storage and that the correct meta-data and IDs are set.
        """
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group).id

        count_stored = 0
        group_type = None  # In case of a group, the first non-None type is sufficient to remember here
        for res in result:
            if res is None:
                continue
            if not res.fixed_generic_attributes:
                res.fill_from_generic_attributes(self.generic_attributes)
            res.fk_from_operation = self.operation_id
            res.fk_datatype_group = data_type_group_id

            associated_file = h5.path_for_stored_index(res)
            if os.path.exists(associated_file):
                if not res.fixed_generic_attributes:
                    with H5File.from_file(associated_file) as f:
                        f.store_generic_attributes(self.generic_attributes)
                # Compute size on disk, in case file storage is used
                res.disk_size = self.file_handler.compute_size_on_disk(associated_file)

            dao.store_entity(res)
            res.after_store()
            group_type = res.type
            count_stored += 1

        if count_stored > 0 and self._is_group_launch():
            # Update the operation group name
            operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
            operation_group.fill_operationgroup_name(group_type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(self.operation_id) + ' has finished.', count_stored

    def __check_integrity(self, result):
        """
        Check that the returned parameters for LAUNCH operation
        are of the type specified in the adapter's interface.
        """
        for result_entity in result:
            if result_entity is None:
                continue
            if not self.__is_data_in_supported_types(result_entity):
                msg = "Unexpected output DataType %s"
                raise InvalidParameterException(msg % type(result_entity))

    def __is_data_in_supported_types(self, data):

        if data is None:
            return True
        for supported_type in self.get_output():
            if isinstance(data, supported_type):
                return True
        # Data can't be mapped on any supported type !!
        return False

    def _is_group_launch(self):
        """
        Return True if this adapter is launched from a group of operations.
        """
        operation = dao.get_operation_by_id(self.operation_id)
        return operation.fk_operation_group is not None

    def _get_output_path(self):
        return self.storage_path

    def load_entity_by_gid(self, data_gid):
        # type: (typing.Union[uuid.UUID, str]) -> DataType
        """
        Load a generic DataType, specified by GID.
        """
        idx = load_entity_by_gid(data_gid)
        if idx and self.generic_attributes.parent_burst is None:
            # Only in case the BurstConfiguration reference hasn't been set already, take it from the current DT
            self.generic_attributes.parent_burst = idx.fk_parent_burst
        return idx

    def load_traited_by_gid(self, data_gid):
        # type: (typing.Union[uuid.UUID, str]) -> HasTraits
        """
        Load a generic HasTraits instance, specified by GID.
        """
        index = self.load_entity_by_gid(data_gid)
        return h5.load_from_index(index)

    def load_with_references(self, dt_gid):
        # type: (typing.Union[uuid.UUID, str]) -> HasTraits
        dt_index = self.load_entity_by_gid(dt_gid)
        h5_path = h5.path_for_stored_index(dt_index)
        dt, _ = h5.load_with_references(h5_path)
        return dt

    def view_model_to_has_traits(self, view_model):
        # type: (ViewModel) -> HasTraits
        has_traits_class = view_model.linked_has_traits
        if not has_traits_class:
            raise Exception("There is no linked HasTraits for this ViewModel {}".format(type(view_model)))
        has_traits = has_traits_class()
        view_model_class = type(view_model)
        for attr_name in has_traits_class.declarative_attrs:
            view_model_class_attr = getattr(view_model_class, attr_name)
            view_model_attr = getattr(view_model, attr_name)
            if isinstance(view_model_class_attr, DataTypeGidAttr) and view_model_attr:
                attr_value = self.load_with_references(view_model_attr)
            elif isinstance(view_model_class_attr, Attr) and isinstance(view_model_attr, ViewModel):
                attr_value = self.view_model_to_has_traits(view_model_attr)
            elif isinstance(view_model_class_attr, List) and len(view_model_attr) > 0 and isinstance(view_model_attr[0],
                                                                                                     ViewModel):
                attr_value = list()
                for view_model_elem in view_model_attr:
                    elem = self.view_model_to_has_traits(view_model_elem)
                    attr_value.append(elem)
            else:
                attr_value = view_model_attr
            setattr(has_traits, attr_name, attr_value)
        return has_traits

    @staticmethod
    def build_adapter_from_class(adapter_class):
        """
        Having a subclass of ABCAdapter, prepare an instance for launching an operation with it.
        """
        if not issubclass(adapter_class, ABCAdapter):
            raise IntrospectionException("Invalid data type: It should extend adapters.ABCAdapter!")
        try:
            stored_adapter = dao.get_algorithm_by_module(adapter_class.__module__, adapter_class.__name__)

            adapter_instance = adapter_class()
            adapter_instance.stored_adapter = stored_adapter
            return adapter_instance
        except Exception as excep:
            LOGGER.exception(excep)
            raise IntrospectionException(str(excep))

    @staticmethod
    def determine_adapter_class(stored_adapter):
        # type: (Algorithm) -> ABCAdapter
        """
        Determine the class of an adapter based on module and classname strings from stored_adapter
        :param stored_adapter: Algorithm or AlgorithmDTO type
        :return: a subclass of ABCAdapter
        """
        ad_module = importlib.import_module(stored_adapter.module)
        adapter_class = getattr(ad_module, stored_adapter.classname)
        return adapter_class

    @staticmethod
    def build_adapter(stored_adapter):
        # type: (Algorithm) -> ABCAdapter
        """
        Having a module and a class name, create an instance of ABCAdapter.
        """
        try:
            adapter_class = ABCAdapter.determine_adapter_class(stored_adapter)
            adapter_instance = adapter_class()
            adapter_instance.stored_adapter = stored_adapter
            return adapter_instance

        except Exception:
            msg = "Could not load Adapter Instance for Stored row %s" % stored_adapter
            LOGGER.exception(msg)
            raise IntrospectionException(msg)

    def load_view_model(self, operation):
        storage_path = self.file_handler.get_project_folder(operation.project, str(operation.id))
        input_gid = operation.view_model_gid
        return h5.load_view_model(input_gid, storage_path)

    def array_size2kb(self, size):
        """
        :param size: size in bytes
        :return: size in kB
        """
        return size * TvbProfile.current.MAGIC_NUMBER / 8 / 2 ** 10
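
array_size2kb above converts a size in bytes into kilobytes while scaling by MAGIC_NUMBER / 8 as a safety margin. A worked sketch of the arithmetic is below; the MAGIC_NUMBER value of 9 is an assumption for illustration, the real value is read from TvbProfile.

MAGIC_NUMBER = 9  # assumed value for illustration; TVB reads it from TvbProfile


def array_size2kb(size):
    """Mirror of the method above: size in bytes -> size in kB, with a MAGIC_NUMBER / 8 margin."""
    return size * MAGIC_NUMBER / 8 / 2 ** 10


# With the assumed margin, 1 MiB of raw data is reported as 1152 kB.
print(array_size2kb(1024 * 1024))  # -> 1152.0
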
Example #10
class FilesUpdateManager(UpdateManager):
    """
    Manager for updating H5 files version, when code gets changed.
    """

    UPDATE_SCRIPTS_SUFFIX = "_update_files"
    PROJECTS_PAGE_SIZE = 20
    DATA_TYPES_PAGE_SIZE = 500
    STATUS = True
    MESSAGE = "Done"

    def __init__(self):
        super(FilesUpdateManager, self).__init__(file_update_scripts,
                                                 TvbProfile.current.version.DATA_CHECKED_TO_VERSION,
                                                 TvbProfile.current.version.DATA_VERSION)
        self.files_helper = FilesHelper()

    def get_file_data_version(self, file_path):
        """
        Return the data version for the given file.

        :param file_path: the path on disk to the file for which you need the TVB data version
        :returns: a number representing the data version for which the input file was written
        """
        manager = self._get_manager(file_path)
        return manager.get_file_data_version()

    def is_file_up_to_date(self, file_path):
        """
        Returns True only if the data version of the file is equal with the
        data version specified into the TVB configuration file.
        """
        try:
            file_version = self.get_file_data_version(file_path)
        except MissingDataFileException as ex:
            self.log.exception(ex)
            return False
        except FileStructureException as ex:
            self.log.exception(ex)
            return False

        if file_version == TvbProfile.current.version.DATA_VERSION:
            return True
        return False

    def upgrade_file(self, input_file_name, datatype=None, burst_match_dict=None):
        """
        Upgrades the given file to the latest data version. The file will be upgraded
        sequentially, up until the current version from tvb.basic.config.settings.VersionSettings.DB_STRUCTURE_VERSION

        :param input_file_name: the path to the file which needs to be upgraded
        :return: True when the update was successful and False when it resulted in an error.
        """
        if self.is_file_up_to_date(input_file_name):
            # Avoid running the DB update of size, when H5 is not being changed, to speed-up
            return True

        file_version = self.get_file_data_version(input_file_name)
        self.log.info("Updating from version %s , file: %s " % (file_version, input_file_name))
        for script_name in self.get_update_scripts(file_version):
            temp_file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER,
                                          os.path.basename(input_file_name) + '.tmp')
            self.files_helper.copy_file(input_file_name, temp_file_path)
            try:
                self.run_update_script(script_name, input_file=input_file_name, burst_match_dict=burst_match_dict)
            except FileMigrationException as excep:
                self.files_helper.copy_file(temp_file_path, input_file_name)
                os.remove(temp_file_path)
                self.log.error(excep)
                return False

        if datatype:
            # Compute and update the disk_size attribute of the DataType in DB:
            datatype.disk_size = self.files_helper.compute_size_on_disk(input_file_name)
            dao.store_entity(datatype)

        return True

    def __upgrade_h5_list(self, h5_files):
        """
        Upgrade a list of H5 files to the current version.

        :returns: (nr_of_dts_upgraded_fine, nr_of_dts_failed) a two-tuple of integers representing
            the number of files for which the upgrade worked fine, and the number of files for which
            the upgrade failed.
        """
        nr_of_dts_upgraded_fine = 0
        nr_of_dts_failed = 0

        burst_match_dict = {}
        for path in h5_files:
            update_result = self.upgrade_file(path, burst_match_dict=burst_match_dict)

            if update_result:
                nr_of_dts_upgraded_fine += 1
            else:
                nr_of_dts_failed += 1

        return nr_of_dts_upgraded_fine, nr_of_dts_failed

    # TODO: We should migrate the older scripts to Python 3 if we want to support migration for versions < 4
    def run_all_updates(self):
        """
        Upgrades all the data types from TVB storage to the latest data version.

        :returns: a two entry tuple (status, message) where status is a boolean that is True in case
            the upgrade was successful for all DataTypes and False otherwise, and message is a status
            update message.
        """
        if TvbProfile.current.version.DATA_CHECKED_TO_VERSION < TvbProfile.current.version.DATA_VERSION:
            total_count = dao.count_all_datatypes()

            self.log.info("Starting to run H5 file updates from version %d to %d, for %d datatypes" % (
                TvbProfile.current.version.DATA_CHECKED_TO_VERSION,
                TvbProfile.current.version.DATA_VERSION, total_count))

            # Keep track of how many DataTypes were properly updated and how many
            # were marked as invalid due to missing files or invalid manager.
            start_time = datetime.now()

            file_paths = self.get_all_h5_paths()
            total_count = len(file_paths)
            no_ok, no_error = self.__upgrade_h5_list(file_paths)

            self.log.info("Updated H5 files in total: %d [fine:%d, failed:%d in: %s min]" % (
                total_count, no_ok, no_error, int((datetime.now() - start_time).seconds / 60)))
            delete_old_burst_table_after_migration()

            # Now update the configuration file since update was done
            config_file_update_dict = {stored.KEY_LAST_CHECKED_FILE_VERSION: TvbProfile.current.version.DATA_VERSION}

            if no_error == 0:
                # Everything went fine
                config_file_update_dict[stored.KEY_FILE_STORAGE_UPDATE_STATUS] = FILE_STORAGE_VALID
                FilesUpdateManager.STATUS = True
                FilesUpdateManager.MESSAGE = ("File upgrade finished successfully for all %s entries. "
                                              "Thank you for your patience!" % total_count)
                self.log.info(FilesUpdateManager.MESSAGE)
            else:
                # Something went wrong
                config_file_update_dict[stored.KEY_FILE_STORAGE_UPDATE_STATUS] = FILE_STORAGE_INVALID
                FilesUpdateManager.STATUS = False
                FilesUpdateManager.MESSAGE = ("Out of %s stored DataTypes, %s were upgraded successfully, but %s had "
                                              "faults and were marked invalid" % (total_count, no_ok, no_error))
                self.log.warning(FilesUpdateManager.MESSAGE)

            TvbProfile.current.version.DATA_CHECKED_TO_VERSION = TvbProfile.current.version.DATA_VERSION
            TvbProfile.current.manager.add_entries_to_config_file(config_file_update_dict)

    @staticmethod
    def _get_manager(file_path):
        """
        Returns a storage manager.
        """
        folder, file_name = os.path.split(file_path)
        return HDF5StorageManager(folder, file_name)

    @staticmethod
    def get_all_h5_paths():
        """
        This method returns a list of all H5 files; it is used in the migration from version 4 to 5.
        The H5 files inside each project are retrieved in numerical order (1, 2, 3, etc.).
        """
        h5_files = []
        projects_folder = FilesHelper().get_projects_folder()

        for project_path in os.listdir(projects_folder):
            # Getting operation folders inside the current project
            project_full_path = os.path.join(projects_folder, project_path)
            try:
                project_operations = os.listdir(project_full_path)
            except NotADirectoryError:
                continue
            project_operations_base_names = [os.path.basename(op) for op in project_operations]

            for op_folder in project_operations_base_names:
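                # Operation folders are named by numeric IDs; int() below filters out
                # non-numeric entries (the ValueError handler at the end skips them).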
                try:
                    int(op_folder)
                    op_folder_path = os.path.join(project_full_path, op_folder)
                    for file in os.listdir(op_folder_path):
                        if file.endswith(FilesHelper.TVB_STORAGE_FILE_EXTENSION):
                            h5_file = os.path.join(op_folder_path, file)
                            try:
                                if FilesUpdateManager._is_empty_file(h5_file):
                                    continue
                                h5_files.append(h5_file)
                            except FileStructureException:
                                continue
                except ValueError:
                    pass

        # Sort all h5 files based on their creation date stored in the files themselves
        sorted_h5_files = sorted(h5_files, key=lambda h5_path: FilesUpdateManager._get_create_date_for_sorting(
            h5_path) or datetime.now())
        return sorted_h5_files

    @staticmethod
    def _is_empty_file(h5_file):
        return H5File.get_metadata_param(h5_file, 'Create_date') is None

    @staticmethod
    def _get_create_date_for_sorting(h5_file):
        create_date_str = str(H5File.get_metadata_param(h5_file, 'Create_date'), 'utf-8')
        create_date = string2date(create_date_str, date_format='datetime:%Y-%m-%d %H:%M:%S.%f')
        return create_date
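
A minimal usage sketch for the FilesUpdateManager above, assuming a configured TVB profile and an initialized storage/database layer; the import path is an assumption and may differ between TVB versions.

# NOTE: the module path below is a guess; adjust it to your TVB installation.
from tvb.core.entities.file.files_update_manager import FilesUpdateManager

manager = FilesUpdateManager()

# Upgrade every stored H5 file to the current DATA_VERSION and record the
# outcome in the configuration file (see run_all_updates above).
manager.run_all_updates()
print(FilesUpdateManager.STATUS, FilesUpdateManager.MESSAGE)

# Alternatively, upgrade a single file (hypothetical path):
# manager.upgrade_file("/path/to/SomeDataType.h5")
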
Example #11
0
class ABCAdapter(object):
    """
    Root Abstract class for all TVB Adapters. 
    """
    # TODO: this constants copy is not nice
    TYPE_SELECT = input_tree.TYPE_SELECT
    TYPE_MULTIPLE = input_tree.TYPE_MULTIPLE
    STATIC_ACCEPTED_TYPES = input_tree.STATIC_ACCEPTED_TYPES
    KEY_TYPE = input_tree.KEY_TYPE
    KEY_OPTIONS = input_tree.KEY_OPTIONS
    KEY_ATTRIBUTES = input_tree.KEY_ATTRIBUTES
    KEY_NAME = input_tree.KEY_NAME
    KEY_DESCRIPTION = input_tree.KEY_DESCRIPTION
    KEY_VALUE = input_tree.KEY_VALUE
    KEY_LABEL = input_tree.KEY_LABEL
    KEY_DEFAULT = input_tree.KEY_DEFAULT
    KEY_DATATYPE = input_tree.KEY_DATATYPE
    KEY_DTYPE = input_tree.KEY_DTYPE
    KEY_DISABLED = input_tree.KEY_DISABLED
    KEY_ALL = input_tree.KEY_ALL
    KEY_CONDITION = input_tree.KEY_CONDITION
    KEY_FILTERABLE = input_tree.KEY_FILTERABLE
    KEY_REQUIRED = input_tree.KEY_REQUIRED
    KEY_ID = input_tree.KEY_ID
    KEY_UI_HIDE = input_tree.KEY_UI_HIDE

    # TODO: move everything related to parameters PRE + POST into parameters_factory
    KEYWORD_PARAMS = input_tree.KEYWORD_PARAMS
    KEYWORD_SEPARATOR = input_tree.KEYWORD_SEPARATOR
    KEYWORD_OPTION = input_tree.KEYWORD_OPTION

    INTERFACE_ATTRIBUTES_ONLY = interface.INTERFACE_ATTRIBUTES_ONLY
    INTERFACE_ATTRIBUTES = interface.INTERFACE_ATTRIBUTES

    # model.Algorithm instance that will be set for each adapter created in the build_adapter method
    stored_adapter = None

    __metaclass__ = ABCMeta

    def __init__(self):
        # It will be populated with keys from DataTypeMetaData
        self.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT
        }
        self.file_handler = FilesHelper()
        self.storage_path = '.'
        # Will be populated with the currently running operation's identifier
        self.operation_id = None
        self.user_id = None
        self.log = get_logger(self.__class__.__module__)
        self.tree_manager = InputTreeManager()

    @classmethod
    def get_group_name(cls):
        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "name"):
            return cls._ui_group.name
        return None

    @classmethod
    def get_group_description(cls):
        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "description"):
            return cls._ui_group.description
        return None

    @classmethod
    def get_ui_name(cls):
        if hasattr(cls, "_ui_name"):
            return cls._ui_name
        else:
            return cls.__name__

    @classmethod
    def get_ui_description(cls):
        if hasattr(cls, "_ui_description"):
            return cls._ui_description

    @classmethod
    def get_ui_subsection(cls):
        if hasattr(cls, "_ui_subsection"):
            return cls._ui_subsection

        if hasattr(cls, "_ui_group") and hasattr(cls._ui_group, "subsection"):
            return cls._ui_group.subsection

    @staticmethod
    def can_be_active():
        """
        To be overridden where needed (e.g. Matlab dependent adapters).
        :return: True by default, and False when the current Adapter cannot be executed in the current
        environment for various reasons (e.g. no Matlab or Octave installed)
        """
        return True

    @abstractmethod
    def get_input_tree(self):
        """
        Describes inputs and outputs of the launch method.
        """

    @abstractmethod
    def get_output(self):
        """
        Describes inputs and outputs of the launch method.
        """

    def configure(self, **kwargs):
        """
        To be implemented in each Adapter that requires any specific configurations
        before the actual launch.
        """

    @abstractmethod
    def get_required_memory_size(self, **kwargs):
        """
        Abstract method to be implemented in each adapter. Should return the required memory
        for launching the adapter.
        """

    @abstractmethod
    def get_required_disk_size(self, **kwargs):
        """
        Abstract method to be implemented in each adapter. Should return the required disk space
        for launching the adapter, in kilobytes.
        """

    def get_execution_time_approximation(self, **kwargs):
        """
        Should approximate, based on the input arguments, the time (in seconds) it will take
        for the operation to finish.
        """
        return -1

    @abstractmethod
    def launch(self):
        """
         To be implemented in each Adapter.
         Will contain the logic of the Adapter.
         Any returned DataType will be stored in DB, by the Framework.
        """

    def add_operation_additional_info(self, message):
        """
        Adds additional info on the operation to be displayed in the UI. Usually a warning message.
        """
        current_op = dao.get_operation_by_id(self.operation_id)
        current_op.additional_info = message
        dao.store_entity(current_op)

    @nan_not_allowed()
    def _prelaunch(self,
                   operation,
                   uid=None,
                   available_disk_space=0,
                   **kwargs):
        """
        Method to wrap LAUNCH.
        Will prepare data, and store results on return. 
        """
        self.meta_data.update(json.loads(operation.meta_data))
        self.storage_path = self.file_handler.get_project_folder(
            operation.project, str(operation.id))
        self.operation_id = operation.id
        self.current_project_id = operation.project.id
        self.user_id = operation.fk_launched_by

        self.configure(**kwargs)

        # Compare the amount of memory the current algorithm states it needs,
        # with the average between the RAM available on the OS and the free memory at the current moment.
        # We do not consider only the free memory, because some OSs are freeing late and on-demand only.
        total_free_memory = psutil.virtual_memory().free + psutil.swap_memory(
        ).free
        total_existent_memory = psutil.virtual_memory(
        ).total + psutil.swap_memory().total
        memory_reference = (total_free_memory + total_existent_memory) / 2
        adapter_required_memory = self.get_required_memory_size(**kwargs)

        if adapter_required_memory > memory_reference:
            msg = "Machine does not have enough RAM memory for the operation (expected %.2g GB, but found %.2g GB)."
            raise NoMemoryAvailableException(
                msg %
                (adapter_required_memory / 2**30, memory_reference / 2**30))

        # Compare the expected size of the operation results with the HDD space currently available for the user
        # TVB defines a quota per user.
        required_disk_space = self.get_required_disk_size(**kwargs)
        if available_disk_space < 0:
            msg = "You have exceeded you HDD space quota by %.2f MB Stopping execution."
            raise NoMemoryAvailableException(msg %
                                             (-available_disk_space / 2**10))
        if available_disk_space < required_disk_space:
            msg = (
                "You only have %.2f GB of disk space available but the operation you "
                "launched might require %.2f Stopping execution...")
            raise NoMemoryAvailableException(
                msg %
                (available_disk_space / 2**20, required_disk_space / 2**20))

        operation.start_now()
        operation.estimated_disk_size = required_disk_space
        dao.store_entity(operation)

        result = self.launch(**kwargs)

        if not isinstance(result, (list, tuple)):
            result = [
                result,
            ]
        self.__check_integrity(result)

        return self._capture_operation_results(result, uid)

    def _capture_operation_results(self, result, user_tag=None):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct meta-data and IDs are set.
        """
        results_to_store = []
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(
                datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(
                operation.fk_operation_group).id
        # All entities will have the same subject and state
        subject = self.meta_data[DataTypeMetaData.KEY_SUBJECT]
        state = self.meta_data[DataTypeMetaData.KEY_STATE]
        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]
        perpetuated_identifier = None
        if DataTypeMetaData.KEY_TAG_1 in self.meta_data:
            perpetuated_identifier = self.meta_data[DataTypeMetaData.KEY_TAG_1]

        for res in result:
            if res is None:
                continue
            res.subject = str(subject)
            res.state = state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
            if not res.user_tag_1:
                res.user_tag_1 = user_tag if user_tag is not None else perpetuated_identifier
            else:
                res.user_tag_2 = user_tag if user_tag is not None else perpetuated_identifier
            res.fk_datatype_group = data_type_group_id
            ## Compute size on disk, in case file storage is used
            if hasattr(res, 'storage_path') and hasattr(
                    res, 'get_storage_file_name'):
                associated_file = os.path.join(res.storage_path,
                                               res.get_storage_file_name())
                res.close_file()
                res.disk_size = self.file_handler.compute_size_on_disk(
                    associated_file)
            res = dao.store_entity(res)
            # Write metaData
            res.persist_full_metadata()
            results_to_store.append(res)
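        # Replace the contents of the caller's list in place, so it ends up
        # holding only the successfully stored entities.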
        del result[0:len(result)]
        result.extend(results_to_store)

        if len(result) and self._is_group_launch():
            ## Update the operation group name
            operation_group = dao.get_operationgroup_by_id(
                operation.fk_operation_group)
            operation_group.fill_operationgroup_name(result[0].type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(
            self.operation_id) + ' has finished.', len(results_to_store)

    def __check_integrity(self, result):
        """
         Check that the returned parameters for LAUNCH operation
        are of the type specified in the adapter's interface.
        """
        entity_id = self.__module__ + '.' + self.__class__.__name__

        for result_entity in result:
            if type(result_entity) == list and len(result_entity) > 0:
                #### Determine the first element not None
                first_item = None
                for res in result_entity:
                    if res is not None:
                        first_item = res
                        break
                if first_item is None:
                    #### All list items are None
                    return
                #### Now check if the first item has a supported type
                if not self.__is_data_in_supported_types(first_item):
                    msg = "Unexpected DataType %s"
                    raise InvalidParameterException(msg % type(first_item))

                first_item_type = type(first_item)
                for res in result_entity:
                    if not isinstance(res, first_item_type):
                        msg = '%s - Heterogeneous types (%s). Expected %s list.'
                        raise InvalidParameterException(
                            msg % (entity_id, type(res), first_item_type))
            else:
                if not self.__is_data_in_supported_types(result_entity):
                    msg = "Unexpected DataType %s"
                    raise InvalidParameterException(msg % type(result_entity))

    def __is_data_in_supported_types(self, data):
        """
        This method checks if the provided data is one of the adapter supported return types 
        """
        if data is None:
            return True
        for supported_type in self.get_output():
            if isinstance(data, supported_type):
                return True
        ##### Data can't be mapped on any supported type !!
        return False

    def _is_group_launch(self):
        """
        Return true if this adapter is launched from a group of operations
        """
        operation = dao.get_operation_by_id(self.operation_id)
        return operation.fk_operation_group is not None

    @staticmethod
    def load_entity_by_gid(data_gid):
        """
        Load a generic DataType, specified by GID.
        """
        return load_entity_by_gid(data_gid)

    @staticmethod
    def build_adapter_from_class(adapter_class):
        """
        Having a subclass of ABCAdapter, prepare an instance for launching an operation with it.
        """
        if not issubclass(adapter_class, ABCAdapter):
            raise IntrospectionException(
                "Invalid data type: It should extend adapters.ABCAdapter!")
        try:
            stored_adapter = dao.get_algorithm_by_module(
                adapter_class.__module__, adapter_class.__name__)

            adapter_instance = adapter_class()
            adapter_instance.stored_adapter = stored_adapter
            return adapter_instance
        except Exception as excep:
            LOGGER.exception(excep)
            raise IntrospectionException(str(excep))
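
For illustration, a minimal (hypothetical) concrete adapter against the interface above; only the ABCAdapter contract is taken from the example, every other name is invented for this sketch.

class DummySumAdapter(ABCAdapter):
    """Toy adapter that only demonstrates the abstract contract."""

    def get_input_tree(self):
        # A real adapter would return the input-tree description expected by TVB.
        return []

    def get_output(self):
        # This toy adapter produces no DataTypes.
        return []

    def get_required_memory_size(self, **kwargs):
        return -1  # unknown / negligible

    def get_required_disk_size(self, **kwargs):
        return 0  # kilobytes

    def launch(self, a=0, b=0):
        self.log.info("Sum is %s", a + b)
        return None  # nothing for the framework to store


# The framework would normally instantiate it through:
# adapter = ABCAdapter.build_adapter_from_class(DummySumAdapter)
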
Example #12
0
class ABCAdapter(object):
    """
    Root Abstract class for all TVB Adapters. 
    """
    # TODO: this constants copy is not nice
    TYPE_SELECT = input_tree.TYPE_SELECT
    TYPE_MULTIPLE = input_tree.TYPE_MULTIPLE
    STATIC_ACCEPTED_TYPES = input_tree.STATIC_ACCEPTED_TYPES
    KEY_TYPE = input_tree.KEY_TYPE
    KEY_OPTIONS = input_tree.KEY_OPTIONS
    KEY_ATTRIBUTES = input_tree.KEY_ATTRIBUTES
    KEY_NAME = input_tree.KEY_NAME
    KEY_DESCRIPTION = input_tree.KEY_DESCRIPTION
    KEY_VALUE = input_tree.KEY_VALUE
    KEY_LABEL = input_tree.KEY_LABEL
    KEY_DEFAULT = input_tree.KEY_DEFAULT
    KEY_DATATYPE = input_tree.KEY_DATATYPE
    KEY_DTYPE = input_tree.KEY_DTYPE
    KEY_DISABLED = input_tree.KEY_DISABLED
    KEY_ALL = input_tree.KEY_ALL
    KEY_CONDITION = input_tree.KEY_CONDITION
    KEY_FILTERABLE = input_tree.KEY_FILTERABLE
    KEY_REQUIRED = input_tree.KEY_REQUIRED
    KEY_ID = input_tree.KEY_ID
    KEY_UI_HIDE = input_tree.KEY_UI_HIDE

    # TODO: move everything related to parameters PRE + POST into parameters_factory
    KEYWORD_PARAMS = input_tree.KEYWORD_PARAMS
    KEYWORD_SEPARATOR = input_tree.KEYWORD_SEPARATOR
    KEYWORD_OPTION = input_tree.KEYWORD_OPTION

    INTERFACE_ATTRIBUTES_ONLY = interface.INTERFACE_ATTRIBUTES_ONLY
    INTERFACE_ATTRIBUTES = interface.INTERFACE_ATTRIBUTES

    # Group that will be set for each adapter created in the build_adapter method
    algorithm_group = None

    _ui_display = 1

    __metaclass__ = ABCMeta


    def __init__(self):
        # It will be populated with keys from DataTypeMetaData
        self.meta_data = {DataTypeMetaData.KEY_SUBJECT: DataTypeMetaData.DEFAULT_SUBJECT}
        self.file_handler = FilesHelper()
        self.storage_path = '.'
        # Will be populated with the currently running operation's identifier
        self.operation_id = None
        self.user_id = None
        self.log = get_logger(self.__class__.__module__)
        self.tree_manager = InputTreeManager()


    @abstractmethod
    def get_input_tree(self):
        """
        Describes inputs and outputs of the launch method.
        """


    @abstractmethod
    def get_output(self):
        """
        Describes inputs and outputs of the launch method.
        """


    def configure(self, **kwargs):
        """
        To be implemented in each Adapter that requires any specific configurations
        before the actual launch.
        """


    @abstractmethod
    def get_required_memory_size(self, **kwargs):
        """
        Abstract method to be implemented in each adapter. Should return the required memory
        for launching the adapter.
        """


    @abstractmethod
    def get_required_disk_size(self, **kwargs):
        """
        Abstract method to be implemented in each adapter. Should return the required disk space
        for launching the adapter, in kilobytes.
        """


    def get_execution_time_approximation(self, **kwargs):
        """
        Method should approximate based on input arguments, the time it will take for the operation 
        to finish (in seconds).
        """
        return -1


    @abstractmethod
    def launch(self):
        """
         To be implemented in each Adapter.
         Will contain the logic of the Adapter.
         Any returned DataType will be stored in DB, by the Framework.
        """


    def add_operation_additional_info(self, message):
        """
        Adds additional info on the operation to be displayed in the UI. Usually a warning message.
        """
        current_op = dao.get_operation_by_id(self.operation_id)
        current_op.additional_info = message
        dao.store_entity(current_op)


    @nan_not_allowed()
    def _prelaunch(self, operation, uid=None, available_disk_space=0, **kwargs):
        """
        Method to wrap LAUNCH.
        Will prepare data, and store results on return. 
        """
        self.meta_data.update(json.loads(operation.meta_data))
        self.storage_path = self.file_handler.get_project_folder(operation.project, str(operation.id))
        self.operation_id = operation.id
        self.current_project_id = operation.project.id
        self.user_id = operation.fk_launched_by

        self.configure(**kwargs)

        # Compare the amount of memory the current algorithm states it needs,
        # with the average between the RAM available on the OS and the free memory at the current moment.
        # We do not consider only the free memory, because some OSs are freeing late and on-demand only.
        total_free_memory = psutil.virtual_memory().free + psutil.swap_memory().free
        total_existent_memory = psutil.virtual_memory().total + psutil.swap_memory().total
        memory_reference = (total_free_memory + total_existent_memory) / 2
        adapter_required_memory = self.get_required_memory_size(**kwargs)

        if adapter_required_memory > memory_reference:
            msg = "Machine does not have enough RAM memory for the operation (expected %.2g GB, but found %.2g GB)."
            raise NoMemoryAvailableException(msg % (adapter_required_memory / 2 ** 30, memory_reference / 2 ** 30))

        # Compare the expected size of the operation results with the HDD space currently available for the user
        # TVB defines a quota per user.
        required_disk_space = self.get_required_disk_size(**kwargs)
        if available_disk_space < 0:
            msg = "You have exceeded you HDD space quota by %.2f MB Stopping execution."
            raise NoMemoryAvailableException(msg % (- available_disk_space / 2 ** 10))
        if available_disk_space < required_disk_space:
            msg = ("You only have %.2f GB of disk space available but the operation you "
                   "launched might require %.2f Stopping execution...")
            raise NoMemoryAvailableException(msg % (available_disk_space / 2 ** 20, required_disk_space / 2 ** 20))

        operation.start_now()
        operation.estimated_disk_size = required_disk_space
        dao.store_entity(operation)

        result = self.launch(**kwargs)

        if not isinstance(result, (list, tuple)):
            result = [result, ]
        self.__check_integrity(result)

        return self._capture_operation_results(result, uid)


    def _capture_operation_results(self, result, user_tag=None):
        """
        After an operation has finished, make sure the results are stored
        in DB storage and that the correct meta-data and IDs are set.
        """
        results_to_store = []
        data_type_group_id = None
        operation = dao.get_operation_by_id(self.operation_id)
        if operation.user_group is None or len(operation.user_group) == 0:
            operation.user_group = date2string(datetime.now(), date_format=LESS_COMPLEX_TIME_FORMAT)
            operation = dao.store_entity(operation)
        if self._is_group_launch():
            data_type_group_id = dao.get_datatypegroup_by_op_group_id(operation.fk_operation_group).id
        # All entities will have the same subject and state
        subject = self.meta_data[DataTypeMetaData.KEY_SUBJECT]
        state = self.meta_data[DataTypeMetaData.KEY_STATE]
        burst_reference = None
        if DataTypeMetaData.KEY_BURST in self.meta_data:
            burst_reference = self.meta_data[DataTypeMetaData.KEY_BURST]
        perpetuated_identifier = None
        if DataTypeMetaData.KEY_TAG_1 in self.meta_data:
            perpetuated_identifier = self.meta_data[DataTypeMetaData.KEY_TAG_1]

        for res in result:
            if res is None:
                continue
            res.subject = str(subject)
            res.state = state
            res.fk_parent_burst = burst_reference
            res.fk_from_operation = self.operation_id
            res.framework_metadata = self.meta_data
            if not res.user_tag_1:
                res.user_tag_1 = user_tag if user_tag is not None else perpetuated_identifier
            else:
                res.user_tag_2 = user_tag if user_tag is not None else perpetuated_identifier
            res.fk_datatype_group = data_type_group_id
            ## Compute size on disk, in case file storage is used
            if hasattr(res, 'storage_path') and hasattr(res, 'get_storage_file_name'):
                associated_file = os.path.join(res.storage_path, res.get_storage_file_name())
                res.close_file()
                res.disk_size = self.file_handler.compute_size_on_disk(associated_file)
            res = dao.store_entity(res)
            # Write metaData
            res.persist_full_metadata()
            results_to_store.append(res)
        del result[0:len(result)]
        result.extend(results_to_store)

        if len(result) and self._is_group_launch():
            ## Update the operation group name
            operation_group = dao.get_operationgroup_by_id(operation.fk_operation_group)
            operation_group.fill_operationgroup_name(result[0].type)
            dao.store_entity(operation_group)

        return 'Operation ' + str(self.operation_id) + ' has finished.', len(results_to_store)


    def __check_integrity(self, result):
        """
         Check that the returned parameters for LAUNCH operation
        are of the type specified in the adapter's interface.
        """
        entity_id = self.__module__ + '.' + self.__class__.__name__

        for result_entity in result:
            if type(result_entity) == list and len(result_entity) > 0:
                #### Determine the first element not None
                first_item = None
                for res in result_entity:
                    if res is not None:
                        first_item = res
                        break
                if first_item is None:
                    #### All list items are None
                    return
                #### Now check if the first item has a supported type
                if not self.__is_data_in_supported_types(first_item):
                    msg = "Unexpected DataType %s"
                    raise Exception(msg % type(first_item))

                first_item_type = type(first_item)
                for res in result_entity:
                    if not isinstance(res, first_item_type):
                        msg = '%s - Heterogeneous types (%s). Expected %s list.'
                        raise Exception(msg % (entity_id, type(res), first_item_type))
            else:
                if not self.__is_data_in_supported_types(result_entity):
                    msg = "Unexpected DataType %s"
                    raise Exception(msg % type(result_entity))


    def __is_data_in_supported_types(self, data):
        """
        This method checks if the provided data is one of the adapter supported return types 
        """
        if data is None:
            return True
        for supported_type in self.get_output():
            if isinstance(data, supported_type):
                return True
        ##### Data can't be mapped on any supported type !!
        return False


    def _is_group_launch(self):
        """
        Return true if this adapter is launched from a group of operations
        """
        operation = dao.get_operation_by_id(self.operation_id)
        return operation.fk_operation_group is not None


    @staticmethod
    def load_entity_by_gid(data_gid):
        """
        Load a generic DataType, specified by GID.
        """
        return load_entity_by_gid(data_gid)


    @staticmethod
    def prepare_adapter(adapter_class):
        """
        Having a subclass of ABCAdapter, prepare an instance for launching an operation with it.
        """
        try:
            if not issubclass(adapter_class, ABCAdapter):
                raise IntrospectionException("Invalid data type: It should extend adapters.ABCAdapter!")
            algo_group = dao.find_group(adapter_class.__module__, adapter_class.__name__)

            adapter_instance = adapter_class()
            adapter_instance.algorithm_group = algo_group
            return adapter_instance
        except Exception as excep:
            get_logger("ABCAdapter").exception(excep)
            raise IntrospectionException(str(excep))
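
For reference, the RAM heuristic used by _prelaunch in both adapter versions above can be exercised in isolation. The sketch below reuses the same psutil calls; the helper name is hypothetical.

import psutil


def has_enough_memory(adapter_required_memory):
    """Return True if the machine passes the _prelaunch memory heuristic."""
    total_free = psutil.virtual_memory().free + psutil.swap_memory().free
    total_existent = psutil.virtual_memory().total + psutil.swap_memory().total
    # Average of free and total memory, to tolerate OSs that free memory lazily.
    memory_reference = (total_free + total_existent) / 2
    return adapter_required_memory <= memory_reference


# e.g. an operation estimated to need 4 GB of RAM:
print(has_enough_memory(4 * 2 ** 30))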