Example #1
    def set_remote_computer_exec(self, remote_computer_exec):
        """
        Set the code as remote, and pass the computer on which it resides
        and the absolute path on that computer.

        :param remote_computer_exec: a tuple (computer, remote_exec_path), where computer is an aiida.orm.Computer and
            remote_exec_path is the absolute path of the main executable on that computer.
        """
        import os

        from aiida import orm
        from aiida.common.lang import type_check

        if (not isinstance(remote_computer_exec, (list, tuple))
                or len(remote_computer_exec) != 2):
            raise ValueError('remote_computer_exec must be a list or tuple '
                             'of length 2, with machine and executable '
                             'name')

        computer, remote_exec_path = tuple(remote_computer_exec)

        if not os.path.isabs(remote_exec_path):
            raise ValueError(
                'exec_path must be an absolute path (on the remote machine)')

        type_check(computer, orm.Computer)

        self._set_remote()
        self.computer = computer
        self.set_attribute('remote_exec_path', remote_exec_path)
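A minimal usage sketch for the setter above, assuming a loaded AiiDA profile; the computer label and executable path are hypothetical:

from aiida import load_profile, orm

load_profile()

computer = orm.load_computer('my_cluster')  # hypothetical computer label
code = orm.Code()
# Mark the code as remote and point it at the executable on that computer
code.set_remote_computer_exec((computer, '/usr/local/bin/pw.x'))  # hypothetical path
code.label = 'pw'
code.store()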
Example #2
    @classmethod
    def create_from_folder(cls, dirpath, label, *, description='', pseudo_type=None, deduplicate=True):
        """Create a new ``PseudoPotentialFamily`` from the pseudo potentials contained in a directory.

        :param dirpath: absolute path to the folder containing the UPF files.
        :param label: label to give to the ``PseudoPotentialFamily``, should not already exist.
        :param description: description to give to the family.
        :param pseudo_type: subclass of ``PseudoPotentialData`` to be used for the parsed pseudos. If not specified,
            and the family defines only a single supported pseudo type in ``_pseudo_types``, then that will be used;
            otherwise a ``ValueError`` is raised.
        :param deduplicate: if True, scan the database for existing pseudo potentials of the same type and with the
            same md5 checksum, and use those instead of the parsed ones.
        :raises ValueError: if a ``PseudoPotentialFamily`` already exists with the given name.
        :raises ValueError: if ``dirpath`` is not a directory or contains anything other than files.
        :raises ValueError: if ``dirpath`` contains multiple pseudo potentials for the same element.
        :raises ValueError: if ``pseudo_type`` is explicitly specified and is not supported by this family class.
        :raises ValueError: if ``pseudo_type`` is not specified and the class supports more than one pseudo type.
        :raises ParsingError: if the constructor of the pseudo type fails for one of the files in the ``dirpath``.
        """
        type_check(description, str, allow_none=True)

        try:
            cls.objects.get(label=label)
        except exceptions.NotExistent:
            family = cls(label=label, description=description)
        else:
            raise ValueError(f'the {cls.__name__} `{label}` already exists')

        pseudos = cls.parse_pseudos_from_directory(dirpath, pseudo_type, deduplicate=deduplicate)

        # Only store the ``Group`` and the pseudo nodes now, so we don't have to worry about cleaning up
        # if an exception is raised while creating them.
        family.store()
        family.add_nodes([pseudo.store() for pseudo in pseudos])

        return family
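A hedged usage sketch for ``create_from_folder``; the import path from the ``aiida-pseudo`` package, the directory, and the family label are assumptions:

from aiida import load_profile
from aiida_pseudo.groups.family import PseudoPotentialFamily  # assumed import path

load_profile()

# Parse every pseudopotential file in the folder, then store the family and its nodes
family = PseudoPotentialFamily.create_from_folder(
    '/path/to/pseudos',  # hypothetical directory of pseudopotential files
    'MyFamily/1.0',      # label; a family with this label must not already exist
    description='example family',
)
print(f'stored family with {family.count()} pseudopotentials')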
Example #3
    def stash_mode(self, value: StashMode):
        """Set the mode with which the data was stashed on the remote.

        :param value: the stash mode.
        """
        type_check(value, StashMode)
        self.set_attribute('stash_mode', value.value)
Example #4
    @classmethod
    def get(cls, **kwargs):
        """
        Custom get for group which can be used to get a group with the given attributes.

        :param kwargs: the attributes to match the group to
        :return: the group, if it exists and is unique
        """
        from aiida.orm import QueryBuilder

        if 'type_string' in kwargs:
            type_check(kwargs['type_string'], str)

        filters = dict(kwargs)
        query = QueryBuilder()

        query.append(cls, filters=filters)
        results = query.all()
        if len(results) > 1:
            raise exceptions.MultipleObjectsError("Found {} groups matching criteria '{}'".format(len(results), kwargs))
        if not results:
            raise exceptions.NotExistent("No group found matching criteria '{}'".format(kwargs))
        return results[0][0]
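A short usage sketch for this custom ``get``, assuming a loaded profile; the group label is hypothetical:

from aiida import load_profile, orm

load_profile()

# Raises NotExistent if nothing matches, MultipleObjectsError if more than one group does
group = orm.Group.get(label='my-group')  # hypothetical group label
print(group.uuid)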
Example #5
    def source_list(self, value: typing.Union[typing.List, typing.Tuple]):
        """Set the list of source files that were stashed.

        :param value: the list of source files.
        """
        type_check(value, (list, tuple))
        self.set_attribute('source_list', value)
Example #6
    def target_basepath(self, value: str):
        """Set the target basepath.

        :param value: the target basepath.
        """
        type_check(value, str)
        self.set_attribute('target_basepath', value)
Example #7
    def __init__(self,
                 create_if_not_exist=False,
                 sub_classes=('aiida.groups:core', )):
        """Construct the parameter type.

        The `sub_classes` argument can be used to narrow the set of subclasses of `Group` that should be matched. By
        default all subclasses of `Group` will be matched, otherwise it is restricted to the subclasses that correspond
        to the entry point names in the tuple of `sub_classes`.

        To prevent having to load the database environment at import time, the actual loading of the entry points is
        deferred until the call to `convert` is made. This is to keep the command line autocompletion light and
        responsive. The entry point strings will be validated, however, to see if they correspond to known entry points.

        :param create_if_not_exist: boolean, if True, will create the group if it does not yet exist. By default the
            group created will be of class `Group`, unless another subclass is specified through `sub_classes`. Note
            that in this case, only a single entry point name can be specified.
        :param sub_classes: a tuple of entry point strings from the `aiida.groups` entry point group.
        """
        type_check(sub_classes, tuple, allow_none=True)

        if create_if_not_exist and sub_classes is not None and len(sub_classes) > 1:
            raise ValueError(
                '`sub_classes` can at most contain one entry point if `create_if_not_exist=True`'
            )

        self._create_if_not_exist = create_if_not_exist
        super().__init__(sub_classes=sub_classes)
Example #8
def get_use_cache(node_class=None, identifier=None):
    """Return whether the caching mechanism should be used for the given entry point according to the configuration.

    :param node_class: the Node class or sub class to check if enabled for caching
    :param identifier: the full entry point string of the process, e.g. `aiida.calculations:arithmetic.add`
    :return: boolean, True if caching is enabled, False otherwise
    :raises ValueError: if the configuration is invalid by defining the class both enabled and disabled
    """
    from aiida.common.lang import type_check

    if node_class is not None:
        warnings.warn(  # pylint: disable=no-member
            'the `node_class` argument is deprecated and will be removed in `v2.0.0`. '
            'Use the `identifier` argument instead', AiidaDeprecationWarning)

    if identifier is not None:
        type_check(identifier, str)

        enabled = identifier in _CONFIG[ConfigKeys.ENABLED.value]
        disabled = identifier in _CONFIG[ConfigKeys.DISABLED.value]

        if enabled and disabled:
            raise ValueError(
                'Invalid configuration: caching for {} is both enabled and disabled.'
                .format(identifier))
        elif enabled:
            return True
        elif disabled:
            return False

    return _CONFIG[ConfigKeys.DEFAULT.value]
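A usage sketch, reusing the entry point string from the docstring above; the import path is an assumption and a loaded profile is required:

from aiida import load_profile
from aiida.manage.caching import get_use_cache  # assumed import path

load_profile()

if get_use_cache(identifier='aiida.calculations:arithmetic.add'):
    print('caching enabled for the arithmetic add calculation')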
Example #9
def migrate_recursively(metadata, data, folder, version=EXPORT_VERSION):
    """Recursive migration of export files from v0.1 to a newer version.

    See specific migration functions for detailed descriptions.

    :param metadata: the content of an export archive metadata.json file
    :param data: the content of an export archive data.json file
    :param folder: SandboxFolder in which the archive has been unpacked (workdir)
    :param version: the version to migrate to, by default the current export version
    """
    old_version = verify_metadata_version(metadata)

    type_check(version, str)

    try:
        if old_version == version:
            raise ArchiveMigrationError('Your export file is already at version {}'.format(version))
        elif old_version > version:
            raise ArchiveMigrationError('Backward migrations are not supported')
        elif old_version in MIGRATE_FUNCTIONS:
            MIGRATE_FUNCTIONS[old_version](metadata, data, folder)
        else:
            raise ArchiveMigrationError('Cannot migrate from version {}'.format(old_version))
    except ValueError as exception:
        raise ArchiveMigrationError(exception)
    except DanglingLinkError:
        raise ArchiveMigrationError('Export file is invalid because it contains dangling links')

    new_version = verify_metadata_version(metadata)

    if new_version < version:
        new_version = migrate_recursively(metadata, data, folder, version)

    return new_version
Example #10
    def user(self, user):
        """Set the user of this node.

        :param user: a `BackendUser`
        """
        type_check(user, DjangoUser)
        self._dbmodel.user = user.dbmodel
Example #11
def export_tar(entities=None, filename=None, **kwargs):
    """Export the entries passed in the 'entities' list to a gzipped tar file.

    .. deprecated:: 1.2.1
        Support for the parameters `what` and `outfile` will be removed in `v2.0.0`.
        Please use `entities` and `filename` instead, respectively.

    :param entities: a list of entity instances; they can belong to different models/entities.
    :type entities: list

    :param filename: the filename (possibly including the absolute path) of the file on which to export.
    :type filename: str
    """
    # Backwards-compatibility
    entities = deprecated_parameters(
        old={
            'name': 'what',
            'value': kwargs.pop('what', None)
        },
        new={
            'name': 'entities',
            'value': entities
        },
    )
    filename = deprecated_parameters(
        old={
            'name': 'outfile',
            'value': kwargs.pop('outfile', None)
        },
        new={
            'name': 'filename',
            'value': filename
        },
    )

    type_check(
        entities, (list, tuple, set),
        msg='`entities` must be specified and given as a list of AiiDA entities'
    )
    entities = list(entities)

    if type_check(filename, str, allow_none=True) is None:
        filename = 'export_data.aiida'

    with SandboxFolder() as folder:
        time_export_start = time.time()
        export_tree(entities=entities, folder=folder, **kwargs)
        time_export_end = time.time()

        with tarfile.open(filename,
                          'w:gz',
                          format=tarfile.PAX_FORMAT,
                          dereference=True) as tar:
            time_compress_start = time.time()
            tar.add(folder.abspath, arcname='')
            time_compress_end = time.time()

    return (time_export_start, time_export_end, time_compress_start,
            time_compress_end)
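A hedged usage sketch for ``export_tar``; the import path and the node primary key are assumptions:

from aiida import load_profile, orm
from aiida.tools.importexport import export_tar  # assumed import path

load_profile()

node = orm.load_node(1234)  # hypothetical pk of a node to export
times = export_tar(entities=[node], filename='export.tar.gz')
print('export took {:.1f} s'.format(times[1] - times[0]))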
Example #12
    def user(self, user):
        """Set the user.

        :param user: the user
        :type user: :class:`aiida.orm.User`
        """
        type_check(user, users.User)
        self._backend_entity.user = user.backend_entity
Example #13
    def family_uuid(self, value):
        """Set the UUID of the `SsspFamily` to which this parameters instance is associated.

        :param value: the UUID of the associated `SsspFamily`.
        :raises: `~aiida.common.exceptions.ModificationNotAllowed`
        """
        type_check(value, (str, UUID))
        return self.set_attribute(self.KEY_FAMILY_UUID, str(value))
Example #14
def get_use_cache(*, identifier=None):
    """Return whether the caching mechanism should be used for the given process type according to the configuration.

    :param identifier: Process type string of the node
    :type identifier: str
    :return: boolean, True if caching is enabled, False otherwise
    :raises: `~aiida.common.exceptions.ConfigurationError` if the configuration is invalid, either due to a general
        configuration error, or by defining the class both enabled and disabled
    """
    type_check(identifier, str, allow_none=True)

    if identifier is not None:
        enable_matches = [
            pattern for pattern in _CONFIG[ConfigKeys.ENABLED.value]
            if _match_wildcard(string=identifier, pattern=pattern)
        ]
        disable_matches = [
            pattern for pattern in _CONFIG[ConfigKeys.DISABLED.value]
            if _match_wildcard(string=identifier, pattern=pattern)
        ]

        if enable_matches and disable_matches:
            # If both enable and disable have matching identifier, we search for
            # the most specific one. This is determined by checking whether
            # all other patterns match the specific pattern.
            PatternWithResult = namedtuple('PatternWithResult', ['pattern', 'use_cache'])
            most_specific = []
            for specific_pattern in enable_matches:
                if all(
                    _match_wildcard(string=specific_pattern, pattern=other_pattern)
                    for other_pattern in enable_matches + disable_matches
                ):
                    most_specific.append(PatternWithResult(pattern=specific_pattern, use_cache=True))
            for specific_pattern in disable_matches:
                if all(
                    _match_wildcard(string=specific_pattern, pattern=other_pattern)
                    for other_pattern in enable_matches + disable_matches
                ):
                    most_specific.append(PatternWithResult(pattern=specific_pattern, use_cache=False))

            if len(most_specific) > 1:
                raise exceptions.ConfigurationError((
                    'Invalid configuration: multiple matches for identifier {}'
                    ', but the most specific identifier is not unique. Candidates: {}'
                ).format(identifier, [match.pattern for match in most_specific]))
            if not most_specific:
                raise exceptions.ConfigurationError(
                    'Invalid configuration: multiple matches for identifier {}, but none of them is most specific.'.
                    format(identifier)
                )
            return most_specific[0].use_cache
        if enable_matches:
            return True
        if disable_matches:
            return False
    return _CONFIG[ConfigKeys.DEFAULT.value]
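The "most specific pattern" rule above can be illustrated standalone; the sketch below assumes ``_match_wildcard`` is a thin wrapper around ``fnmatch.fnmatch`` (an assumption) and shows why an exact identifier beats a wildcard:

from fnmatch import fnmatch

def match_wildcard(string, pattern):
    # assumed stand-in for the private `_match_wildcard` helper
    return fnmatch(string, pattern)

enabled = ['aiida.calculations:*']                # wildcard enables all calculations
disabled = ['aiida.calculations:arithmetic.add']  # exact identifier disables one

identifier = 'aiida.calculations:arithmetic.add'
matches = [p for p in enabled + disabled if match_wildcard(identifier, p)]

# A pattern is "most specific" if every other matching pattern also matches it as a string
most_specific = [p for p in matches if all(match_wildcard(p, q) for q in matches)]
print(most_specific)  # ['aiida.calculations:arithmetic.add'] -> caching disabled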
Example #15
    def get_recommended_cutoffs(self,
                                *,
                                elements=None,
                                structure=None,
                                stringency=None,
                                unit=None):
        """Return tuple of recommended wavefunction and density cutoffs for the given elements or ``StructureData``.

        .. note:: exactly one of the arguments ``elements`` or ``structure`` must be passed.

        :param elements: single or tuple of elements.
        :param structure: a ``StructureData`` node.
        :param stringency: optional stringency if different from the default.
        :param unit: string definition of a unit of energy as recognized by the ``UnitRegistry`` of the ``pint`` lib.
        :return: tuple of recommended wavefunction and density cutoff.
        :raises ValueError: if the requested stringency is not defined for this family.
        :raises ValueError: if optional unit specified is invalid.
        """
        if (elements is None) == (structure is None):
            raise ValueError('exactly one of `elements` or `structure` must be defined')

        type_check(elements, (tuple, str), allow_none=True)
        type_check(structure, StructureData, allow_none=True)

        if unit is not None:
            self.validate_cutoffs_unit(unit)

        if structure is not None:
            symbols = structure.get_symbols_set()
        elif isinstance(elements, tuple):
            symbols = elements
        else:
            symbols = (elements, )

        cutoffs_wfc = []
        cutoffs_rho = []
        cutoffs = self.get_cutoffs(stringency)

        for element in symbols:

            if unit is not None:
                current_unit = self.get_cutoffs_unit(stringency)
                values = {
                    k: U.Quantity(v, current_unit).to(unit).to_tuple()[0]
                    for k, v in cutoffs[element].items()
                }
            else:
                values = cutoffs[element]

            cutoffs_wfc.append(values['cutoff_wfc'])
            cutoffs_rho.append(values['cutoff_rho'])

        return (max(cutoffs_wfc), max(cutoffs_rho))
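A usage sketch for the cutoff getter; the family label, and the assumption that the family can be loaded as a group, are hypothetical:

from aiida import load_profile, orm

load_profile()

family = orm.load_group('SSSP/1.1/PBE/efficiency')  # hypothetical family label
cutoff_wfc, cutoff_rho = family.get_recommended_cutoffs(elements=('Si', 'Ge'), unit='Ry')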
Example #16
    def __init__(self, backend):
        """
        :param backend: the backend
        """
        from . import backends

        type_check(backend, backends.Backend)
        self._backend = backend
        self.inner_to_outer_schema = {}
        self.outer_to_inner_schema = {}
Example #17
    def __init__(self, backend, label, user, description='', type_string=''):
        """Construct a new Django group"""
        type_check(user, users.DjangoUser)
        super(DjangoGroup, self).__init__(backend)

        self._dbmodel = utils.ModelWrapper(
            models.DbGroup(label=label,
                           description=description,
                           user=user.dbmodel,
                           type_string=type_string))
Example #18
def export_zip(entities=None, filename=None, use_compression=True, **kwargs):
    """Export in a zipped folder

    .. deprecated:: 1.2.1
        Support for the parameters `what` and `outfile` will be removed in `v2.0.0`.
        Please use `entities` and `filename` instead, respectively.

    :param entities: a list of entity instances; they can belong to different models/entities.
    :type entities: list

    :param filename: the filename (possibly including the absolute path) of the file on which to export.
    :type filename: str

    :param use_compression: Whether or not to compress the zip file.
    :type use_compression: bool
    """
    # Backwards-compatibility
    entities = deprecated_parameters(
        old={
            'name': 'what',
            'value': kwargs.pop('what', None)
        },
        new={
            'name': 'entities',
            'value': entities
        },
    )
    filename = deprecated_parameters(
        old={
            'name': 'outfile',
            'value': kwargs.pop('outfile', None)
        },
        new={
            'name': 'filename',
            'value': filename
        },
    )

    type_check(
        entities, (list, tuple, set),
        msg='`entities` must be specified and given as a list of AiiDA entities'
    )
    entities = list(entities)

    if type_check(filename, str, allow_none=True) is None:
        filename = 'export_data.aiida'

    with ZipFolder(filename, mode='w',
                   use_compression=use_compression) as folder:
        time_start = time.time()
        export_tree(entities=entities, folder=folder, **kwargs)
        time_end = time.time()

    return (time_start, time_end)
Example #19
    def parse_output(self, detailed_job_info, stdout, stderr):  # pylint: disable=inconsistent-return-statements
        """Parse the output of the scheduler.

        :param detailed_job_info: dictionary with the output returned by the `Scheduler.get_detailed_job_info` command.
            This should contain the keys `retval`, `stdout` and `stderr` corresponding to the return value, stdout and
            stderr returned by the accounting command executed for a specific job id.
        :param stdout: string with the output written by the scheduler to stdout
        :param stderr: string with the output written by the scheduler to stderr
        :return: None or an instance of `aiida.engine.processes.exit_code.ExitCode`
        :raises TypeError or ValueError: if the passed arguments have incorrect type or value
        """
        from aiida.engine import CalcJob

        type_check(detailed_job_info, dict)

        try:
            detailed_stdout = detailed_job_info['stdout']
        except KeyError:
            raise ValueError(
                'the `detailed_job_info` does not contain the required key `stdout`.'
            )

        type_check(detailed_stdout, str)

        # The format of the detailed job info should be a multiline string, where the first line is the header, with
        # the labels of the projected attributes. The following line should be the values of those attributes for the
        # entire job. Any additional lines correspond to those values for any additional tasks that were run.
        lines = detailed_stdout.splitlines()

        try:
            master = lines[1]
        except IndexError:
            raise ValueError(
                'the `detailed_job_info.stdout` contains fewer than two lines.'
            )

        attributes = master.split('|')

        # Pop the last element if it is empty. This happens if the `master` string just finishes with a pipe
        if not attributes[-1]:
            attributes.pop()

        if len(self._detailed_job_info_fields) != len(attributes):
            raise ValueError(
                'second line in `detailed_job_info.stdout` differs in length from the '
                "scheduler's `_detailed_job_info_fields`"
            )

        data = dict(zip(self._detailed_job_info_fields, attributes))

        if data['State'] == 'OUT_OF_MEMORY':
            return CalcJob.exit_codes.ERROR_SCHEDULER_OUT_OF_MEMORY  # pylint: disable=no-member

        if data['State'] == 'TIMEOUT':
            return CalcJob.exit_codes.ERROR_SCHEDULER_OUT_OF_WALLTIME  # pylint: disable=no-member
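To make the expected accounting format concrete, here is a standalone sketch parsing a fabricated two-line output; the field names and values are illustrative only:

# Fabricated pipe-separated output: header with attribute labels, then the job values
detailed_stdout = 'JobID|State|ExitCode|\n12345|TIMEOUT|0:0|\n'
fields = ['JobID', 'State', 'ExitCode']  # stand-in for `_detailed_job_info_fields`

lines = detailed_stdout.splitlines()
attributes = lines[1].split('|')
if not attributes[-1]:  # a trailing pipe leaves an empty last element
    attributes.pop()

data = dict(zip(fields, attributes))
print(data['State'])  # 'TIMEOUT' -> would map to ERROR_SCHEDULER_OUT_OF_WALLTIME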
Example #20
    def computer(self, computer):
        """Set the computer of this node.

        :param computer: a `BackendComputer`
        """
        type_check(computer, DjangoComputer, allow_none=True)

        if computer is not None:
            computer = computer.dbmodel

        self._dbmodel.dbcomputer = computer
Example #21
def close_progress_bar(leave=None):
    """Close instantiated progress bar"""
    global PROGRESS_BAR

    type_check(leave, bool, allow_none=True)

    if PROGRESS_BAR is not None:
        if leave is not None:
            PROGRESS_BAR.leave = leave
        PROGRESS_BAR.close()

    PROGRESS_BAR = None
Example #22
    def get_pseudos(self, structure):
        """Return the mapping of kind names on `UpfData` for the given structure.

        :param structure: the `StructureData` for which to return the corresponding `UpfData` mapping.
        :return: dictionary of kind name mapping `UpfData`
        :raises ValueError: if the family does not contain a `UpfData` for any of the elements of the given structure.
        """
        type_check(structure, StructureData)
        return {
            kind.name: self.get_pseudo(kind.symbol)
            for kind in structure.kinds
        }
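A usage sketch for ``get_pseudos``, assuming a loaded profile; the node pk and family label are hypothetical:

from aiida import load_profile, orm

load_profile()

structure = orm.load_node(1234)          # hypothetical StructureData pk
family = orm.load_group('SSSP/1.1/PBE')  # hypothetical family label
pseudos = family.get_pseudos(structure)  # maps each kind name to its UpfData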
Example #23
    def __init__(self, backend, computer, user):
        """Construct a new instance.

        :param computer: a :class:`aiida.orm.implementation.computers.BackendComputer` instance
        :param user: a :class:`aiida.orm.implementation.users.BackendUser` instance
        :return: an :class:`aiida.orm.implementation.authinfos.BackendAuthInfo` instance
        """
        from . import computers
        from . import users
        super(DjangoAuthInfo, self).__init__(backend)
        type_check(user, users.DjangoUser)
        type_check(computer, computers.DjangoComputer)
        self._dbmodel = utils.ModelWrapper(DbAuthInfo(dbcomputer=computer.dbmodel, aiidauser=user.dbmodel))
Example #24
    def __init__(self,
                 label=None,
                 user=None,
                 description='',
                 type_string=None,
                 backend=None):
        """
        Create a new group. Either pass a dbgroup parameter, to reload
        a group from the DB (and then, no further parameters are allowed),
        or pass the parameters for the Group creation.

        .. deprecated:: 1.2.0
            The parameter `type_string` will be removed in `v2.0.0` and is now determined automatically.

        :param label: The group label, required on creation
        :type label: str

        :param description: The group description (by default, an empty string)
        :type description: str

        :param user: The owner of the group (by default, the automatic user)
        :type user: :class:`aiida.orm.User`

        :param type_string: a string identifying the type of group (by default,
            an empty string, indicating an user-defined group.
        :type type_string: str
        """
        if not label:
            raise ValueError('Group label must be provided')

        if type_string is not None:
            warnings.warn('`type_string` is deprecated because it is determined automatically')  # pylint: disable=no-member
            # If `type_string` is explicitly defined, override the automatically determined
            # `self._type_string`. This is necessary for backwards compatibility.
            self._type_string = type_string

        type_string = self._type_string

        backend = backend or get_manager().get_backend()
        user = user or users.User.objects(backend).get_default()
        type_check(user, users.User)

        model = backend.groups.create(label=label,
                                      user=user.backend_entity,
                                      description=description,
                                      type_string=type_string)
        super().__init__(model)
Example #25
    @staticmethod
    def validate_cutoffs_unit(unit: str) -> None:
        """Validate the cutoffs unit.

        The unit should be a name that is recognized by the ``pint`` library to be a unit of energy.

        :raises ValueError: if an invalid unit is specified.
        """
        type_check(unit, str)

        if unit not in U:
            raise ValueError(f'`{unit}` is not a valid unit.')

        if not U.Quantity(1, unit).check('[energy]'):
            raise ValueError(f'`{unit}` is not a valid energy unit.')
Example #26
def generate_inputs(process_class: engine.Process,
                    protocol: Dict,
                    code: orm.Code,
                    structure: StructureData,
                    override: Dict[str, Any] = None) -> Dict[str, Any]:
    """Generate the input parameters for the given workchain type for a given code and structure.

    The override argument can be used to pass a dictionary with values for specific inputs that should override the
    defaults. This dictionary should have the same nested structure as the final input dictionary would have for the
    workchain submission.

    :param process_class: process class, either calculation or workchain,
        i.e. ``AbinitCalculation`` or ``AbinitBaseWorkChain``
    :param protocol: the protocol based on which to choose input parameters
    :param code: the code or code name to use
    :param structure: the structure
    :param override: a dictionary to override specific inputs
    :return: input dictionary
    """
    # pylint: disable=too-many-arguments,unused-argument
    from aiida.common.lang import type_check

    AbinitCalculation = plugins.CalculationFactory('abinit')  # pylint: disable=invalid-name
    AbinitBaseWorkChain = plugins.WorkflowFactory('abinit.base')  # pylint: disable=invalid-name

    type_check(structure, orm.StructureData)

    if not isinstance(code, orm.Code):
        try:
            code = orm.load_code(code)
        except (exceptions.MultipleObjectsError,
                exceptions.NotExistent) as exception:
            raise ValueError('could not load the code {}: {}'.format(
                code, exception)) from exception

    if process_class == AbinitCalculation:
        protocol = protocol['abinit']
        dictionary = generate_inputs_calculation(protocol, code, structure,
                                                 override)
    elif process_class == AbinitBaseWorkChain:
        protocol = protocol['base']
        dictionary = generate_inputs_base(protocol, code, structure, override)
    else:
        raise NotImplementedError(
            'process class {} is not supported'.format(process_class))

    return dictionary
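A hedged call sketch for ``generate_inputs``; the protocol dictionary shape follows only the dispatch above (top-level ``abinit`` and ``base`` keys), and every label is hypothetical:

from aiida import load_profile, orm, plugins

load_profile()

structure = orm.load_node(1234)        # hypothetical StructureData pk
protocol = {'abinit': {}, 'base': {}}  # fill with real protocol parameters

inputs = generate_inputs(
    plugins.WorkflowFactory('abinit.base'),
    protocol,
    'abinit-9.4@my_cluster',  # code label string; loaded via `orm.load_code` internally
    structure,
)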
Example #27
    @classmethod
    def from_backend_entity(cls, backend_entity):
        """
        Construct an entity from a backend entity instance

        :param backend_entity: the backend entity

        :return: an AiiDA entity instance
        """
        from . import implementation

        type_check(backend_entity, implementation.BackendEntity)
        entity = cls.__new__(cls)
        entity.init_from_backend(backend_entity)
        call_with_super_check(entity.initialize)
        return entity
Example #28
    def __init__(self, backend, label, user, description='', type_string=''):
        """
        Construct a new SQLA group

        :param backend: the backend to use
        :param label: the group label
        :param user: the owner of the group
        :param description: an optional group description
        :param type_string: an optional type for the group to contain
        """
        type_check(user, users.SqlaUser)
        super().__init__(backend)

        dbgroup = DbGroup(label=label, description=description, user=user.dbmodel, type_string=type_string)
        self._dbmodel = utils.ModelWrapper(dbgroup)
Example #29
    @classmethod
    def from_dbmodel(cls, dbmodel, backend):
        """
        Create a SQLAlchemy entity from the corresponding db model class

        :param dbmodel: the model to create the entity from
        :param backend: the corresponding backend
        :return: the SQLAlchemy entity
        """
        from .backend import SqlaBackend  # pylint: disable=cyclic-import
        cls._class_check()
        type_check(dbmodel, cls.MODEL_CLASS)
        type_check(backend, SqlaBackend)
        entity = cls.__new__(cls)
        super(SqlaModelEntity, entity).__init__(backend)
        entity._dbmodel = utils.ModelWrapper(dbmodel)  # pylint: disable=protected-access
        return entity
Example #30
    def can_run_on(self, computer):
        """
        Return True if this code can run on the given computer, False otherwise.

        Local codes can run on any machine; remote codes can run only on the machine
        on which they reside.

        TODO: add filters to mask the remote machines on which a local code can run.
        """
        from aiida import orm
        from aiida.common.lang import type_check

        if self.is_local():
            return True

        type_check(computer, orm.Computer)
        return computer.id == self.get_remote_computer().id