Example #1
def comment_show(user, nodes):
    """Show the comments of one or multiple nodes."""
    for node in nodes:
        msg = '* Comments for Node<{}>'.format(node.pk)
        echo.echo('*' * len(msg))
        echo.echo(msg)
        echo.echo('*' * len(msg))

        all_comments = node.get_comments()

        if user is not None:
            comments = [comment for comment in all_comments if comment.user.email == user.email]

            if not comments:
                valid_users = ', '.join(set(comment.user.email for comment in all_comments))
                echo.echo_warning('no comments found for user {}'.format(user))
                echo.echo_info('valid users found for Node<{}>: {}'.format(node.pk, valid_users))

        else:
            comments = all_comments

        for comment in comments:
            comment_msg = [
                'Comment<{}> for Node<{}> by {}'.format(comment.id, node.pk, comment.user.email),
                'Created on {}'.format(timezone.localtime(comment.ctime).strftime('%Y-%m-%d %H:%M')),
                'Last modified on {}'.format(timezone.localtime(comment.mtime).strftime('%Y-%m-%d %H:%M')),
                '\n{}\n'.format(comment.content),
            ]
            echo.echo('\n'.join(comment_msg))

        if not comments:
            echo.echo_info('no comments found')
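
A minimal sketch of exercising the command body above, assuming the AiiDA 1.x ORM API (the node and comment text are illustrative):

from aiida import orm

node = orm.Data().store()
node.add_comment('needs a second look')  # attributed to the current default user
comment_show(None, [node])               # user=None shows comments from all users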
Example #2
def profile_list():
    """Display a list of all available profiles."""

    try:
        config = get_config()
    except (exceptions.MissingConfigurationError,
            exceptions.ConfigurationError) as exception:
        # This can happen for a fresh install when `verdi setup` has not yet been run. In this case it is still nice
        # to be able to see the configuration directory, for instance for those who have set `AIIDA_PATH`. This way
        # they can at least verify that it is correctly set.
        from aiida.manage.configuration.settings import AIIDA_CONFIG_FOLDER
        echo.echo_info('configuration folder: {}'.format(AIIDA_CONFIG_FOLDER))
        echo.echo_critical(str(exception))
    else:
        echo.echo_info('configuration folder: {}'.format(config.dirpath))

    if not config.profiles:
        echo.echo_warning(
            'no profiles configured: run `verdi setup` to create one')
    else:
        sort = lambda profile: profile.name
        highlight = lambda profile: profile.name == config.default_profile_name
        echo.echo_formatted_list(config.profiles, ['name'],
                                 sort=sort,
                                 highlight=highlight)
Example #3
def print_last_process_state_change(process_type=None):
    """
    Print the last time that a process of the specified type has changed its state.
    This function will also print a warning if the daemon is not running.

    :param process_type: optional process type for which to get the latest state change timestamp.
        Valid process types are either 'calculation' or 'work'.
    """
    from aiida.cmdline.utils.common import format_local_time
    from aiida.cmdline.utils.echo import echo_info, echo_warning
    from aiida.common import timezone
    from aiida.common.utils import str_timedelta
    from aiida.engine.daemon.client import get_daemon_client
    from aiida.engine.utils import get_process_state_change_timestamp

    client = get_daemon_client()

    timestamp = get_process_state_change_timestamp(process_type)

    if timestamp is None:
        echo_info('last time an entry changed state: never')
    else:
        timedelta = timezone.delta(timestamp, timezone.now())
        formatted = format_local_time(timestamp,
                                      format_str='at %H:%M:%S on %Y-%m-%d')
        relative = str_timedelta(timedelta,
                                 negative_to_zero=True,
                                 max_num_fields=1)
        echo_info('last time an entry changed state: {} ({})'.format(
            relative, formatted))

    if not client.is_daemon_running:
        echo_warning('the daemon is not running', bold=True)
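
Hypothetical usage; per the docstring the only valid process types are 'calculation' and 'work', and passing None presumably considers both:

print_last_process_state_change()                      # across all process types
print_last_process_state_change(process_type='work')   # workflow processes only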
Example #4
    def from_file(cls, filepath):
        """Instantiate a configuration object from the contents of a given file.

        .. note:: if the filepath does not exist an empty file will be created with the current default configuration
            and will be written to disk. If the filepath does already exist but contains a configuration with an
            outdated schema, the content will be migrated and then written to disk.

        :param filepath: the absolute path to the configuration file
        :return: `Config` instance
        """
        from aiida.cmdline.utils import echo
        from .migrations import check_and_migrate_config, config_needs_migrating

        try:
            with open(filepath, 'r', encoding='utf8') as handle:
                config = json.load(handle)
        except FileNotFoundError:
            config = Config(filepath, check_and_migrate_config({}))
            config.store()
        else:
            migrated = False

            # If the configuration file needs to be migrated, first create a specific backup so it can easily be reverted
            if config_needs_migrating(config):
                migrated = True
                echo.echo_warning(f'current configuration file `{filepath}` is outdated and will be migrated')
                filepath_backup = cls._backup(filepath)
                echo.echo_warning(f'original backed up to `{filepath_backup}`')

            config = Config(filepath, check_and_migrate_config(config))

            if migrated:
                config.store()

        return config
Example #5
def detect_invalid_nodes():
    """Scan the database for invalid nodes."""
    from tabulate import tabulate

    from aiida.manage.database.integrity.sql.nodes import INVALID_NODE_SELECT_STATEMENTS
    from aiida.manage.manager import get_manager

    integrity_violated = False

    backend = get_manager().get_backend()

    for check in INVALID_NODE_SELECT_STATEMENTS:

        result = backend.execute_prepared_statement(check.sql,
                                                    check.parameters)

        if result:
            integrity_violated = True
            echo.echo_warning(f'{check.message}:\n')
            echo.echo(tabulate(result, headers=check.headers))

    if not integrity_violated:
        echo.echo_success('no integrity violations detected')
    else:
        echo.echo_critical('one or more integrity violations detected')
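
Each check only needs the four attributes read above. A hypothetical entry, for illustration only (the real statements live in aiida.manage.database.integrity.sql.nodes and their exact shape and SQL may differ):

from collections import namedtuple

IntegrityCheck = namedtuple('IntegrityCheck', ['sql', 'parameters', 'headers', 'message'])  # assumed shape

example_check = IntegrityCheck(
    sql='SELECT id, node_type FROM db_dbnode WHERE node_type NOT LIKE :pattern',  # illustrative SQL
    parameters={'pattern': '%.%.%.'},
    headers=['ID', 'Node type'],
    message='detected nodes with an invalid node type string',
)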
Example #6
def exportfamily(folder, group_name):
    """
    Export a pseudopotential family into a folder.
    Call without parameters to get some help.
    """
    import os
    from aiida.common.exceptions import NotExistent
    from aiida.orm import DataFactory

    # pylint: disable=invalid-name
    UpfData = DataFactory('upf')
    try:
        group = UpfData.get_upf_group(group_name)
    except NotExistent:
        echo.echo_critical("upf family {} not found".format(group_name))

    # pylint: disable=protected-access
    for node in group.nodes:
        dest_path = os.path.join(folder, node.filename)
        if not os.path.isfile(dest_path):
            with open(dest_path, 'w') as dest:
                with node._get_folder_pathsubfolder.open(
                        node.filename) as source:
                    dest.write(source.read())
        else:
            echo.echo_warning("File {} is already present in the "
                              "destination folder".format(node.filename))
Example #7
    def _dry_run_callback(pks):
        """Confirm deletion interactively, unless `force` (from the enclosing scope) is set."""
        if not pks or force:
            return False
        echo.echo_warning(f'YOU ARE ABOUT TO DELETE {len(pks)} NODES! THIS CANNOT BE UNDONE!')
        return not click.confirm('Shall I continue?', abort=True)
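
For context, a callback like this is typically passed as the `dry_run` argument of `aiida.tools.delete_nodes`, which in recent aiida-core releases accepts a callable; note that `force` is closed over from the enclosing command. A sketch with illustrative PKs:

from aiida.tools import delete_nodes

delete_nodes([101, 102, 103], dry_run=_dry_run_callback)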
Example #8
    def from_file(cls, filepath):
        """Instantiate a configuration object from the contents of a given file.

        .. note:: if the filepath does not exist an empty file will be created with the default configuration.

        :param filepath: the absolute path to the configuration file
        :return: `Config` instance
        """
        from aiida.cmdline.utils import echo
        from .migrations import check_and_migrate_config, config_needs_migrating

        try:
            with io.open(filepath, 'r', encoding='utf8') as handle:
                config = json.load(handle)
        except (IOError, OSError):
            config = Config(filepath, check_and_migrate_config({}))
            config.store()
        else:
            # If the configuration file needs to be migrated, first create a specific backup so it can easily be reverted
            if config_needs_migrating(config):
                echo.echo_warning(
                    'current configuration file `{}` is outdated and will be migrated'
                    .format(filepath))
                filepath_backup = cls._backup(filepath)
                echo.echo_warning(
                    'original backed up to `{}`'.format(filepath_backup))

            config = Config(filepath, check_and_migrate_config(config))

        return config
Example #9
def work_watch(calculations):
    """
    Watch the state transitions for work calculations
    """
    import time

    from kiwipy import BroadcastFilter
    from aiida.work.rmq import create_communicator

    def _print(body, sender, subject, correlation_id):
        echo.echo("pk={}, subject={}, body={}, correlation_id={}".format(
            sender, subject, body, correlation_id))

    communicator = create_communicator()

    for calculation in calculations:

        if calculation.is_terminated:
            echo.echo_warning('Calculation<{}> is already terminated'.format(
                calculation.pk))
        communicator.add_broadcast_subscriber(
            BroadcastFilter(_print, sender=calculation.pk))

    try:
        # `await` is a reserved keyword in Python 3, so it cannot be a method name;
        # block until interrupted instead
        while True:
            time.sleep(2)
    except (SystemExit, KeyboardInterrupt):
        try:
            communicator.disconnect()
        except RuntimeError:
            pass
Example #10
    def __init__(self, name, attributes, from_config=False):
        if not isinstance(attributes, collections.abc.Mapping):
            raise TypeError('attributes should be a mapping but is {}'.format(
                type(attributes)))

        self._name = name
        self._attributes = {}

        for internal_key, value in attributes.items():
            if from_config:
                try:
                    internal_key = self._map_config_to_internal[internal_key]
                except KeyError:
                    from aiida.cmdline.utils import echo
                    echo.echo_warning(
                        'removed unsupported key `{}` with value `{}` from profile `{}`'
                        .format(internal_key, value, name))
                    continue
            setattr(self, internal_key, value)

        # Create a default UUID if not specified
        if self.uuid is None:
            from uuid import uuid4
            self.uuid = uuid4().hex

        # Currently, whether a profile is a test profile is solely determined by its name starting with 'test_'
        self._test_profile = bool(self.name.startswith('test_'))
Example #11
def check_worker_load(active_slots):
    """
    Check if the percentage usage of the daemon worker slots exceeds a threshold.
    If it does, print a warning.

    The purpose of this check is to warn the user if they are close to running out of worker slots
    which could lead to their processes becoming stuck indefinitely.

    :param active_slots: the number of currently active worker slots
    """
    from aiida.common.exceptions import CircusCallError
    from aiida.cmdline.utils import echo
    from aiida.manage.external.rmq import _RMQ_TASK_PREFETCH_COUNT

    warning_threshold = 0.9  # 90%

    slots_per_worker = _RMQ_TASK_PREFETCH_COUNT

    try:
        active_workers = get_num_workers()
    except CircusCallError:
        echo.echo_critical(
            'Could not contact Circus to get the number of active workers')

    if active_workers is not None:
        available_slots = active_workers * slots_per_worker
        percent_load = (active_slots / available_slots)
        if percent_load > warning_threshold:
            echo.echo('')  # New line
            echo.echo_warning(
                '{:.0f}% of the available daemon worker slots have been used!'.
                format(percent_load * 100))
            echo.echo_warning(
                "Increase the number of workers with 'verdi daemon incr'.\n")
Example #12
def delete_stale_pid_file(client):
    """Delete a potentially state daemon PID file.

    Checks if the PID contatined in the circus PID file (circus-{PROFILE_NAME}.pid) matches a valid running `verdi`
    process. If it does not, the PID file is stale and will be removed.

    This situation can arise if a system is shut down suddenly and so the process is killed but the PID file is not
    deleted in time. When the `get_daemon_pid()` method is called, an incorrect PID is returned. Alternatively, another
    process or the user may have meddled with the PID file in some way, corrupting it.

    :param client: the `DaemonClient`
    """
    import os
    import psutil

    class StartCircusNotFound(Exception):
        """For when 'start-circus' is not found in the ps command."""

    pid = client.get_daemon_pid()

    if pid is not None:
        try:
            process = psutil.Process(pid)
            if _START_CIRCUS_COMMAND not in process.cmdline():
                raise StartCircusNotFound(
                )  # Also this is a case in which the process is not there anymore
        except (psutil.AccessDenied, psutil.NoSuchProcess,
                StartCircusNotFound):
            echo.echo_warning(
                'Deleted apparently stale daemon PID file as its associated process<{}> does not exist anymore'
                .format(pid))
            if os.path.isfile(client.circus_pid_file):
                os.remove(client.circus_pid_file)
Example #13
def _try_import(migration_performed, file_to_import, archive, group, migration, non_interactive, **kwargs):
    """Utility function for `verdi import` to try to import archive

    :param migration_performed: Boolean to determine the exception message to throw for
        `~aiida.tools.importexport.common.exceptions.IncompatibleArchiveVersionError`
    :param file_to_import: Absolute path, including filename, of file to be migrated.
    :param archive: Filename of the archive to be migrated and subsequently imported.
    :param group: AiiDA Group into which the import will be associated.
    :param migration: Whether or not to force migration of archive, if needed.
    :param non_interactive: Whether or not the user should be asked for input for any reason.
    :param kwargs: Keyword arguments that _must_ contain:
        * `'extras_mode_existing'`: `import_data`'s `'extras_mode_existing'` keyword, determining import rules for
        Extras.
        * `'extras_mode_new'`: `import_data`'s `'extras_mode_new'` keyword, determining import rules for Extras.
        * `'comment_mode'`: `import_data`'s `'comment_mode'` keyword, determining import rules for Comments.
    """
    from aiida.tools.importexport import import_data, IncompatibleArchiveVersionError

    # Checks
    expected_keys = ['extras_mode_existing', 'extras_mode_new', 'comment_mode']
    for key in expected_keys:
        if key not in kwargs:
            raise ValueError("{} needed for utility function '{}' to use in 'import_data'".format(key, '_try_import'))

    # Initialization
    migrate_archive = False

    try:
        import_data(file_to_import, group, **kwargs)
    except IncompatibleArchiveVersionError as exception:
        if migration_performed:
            # Migration has been performed, something is still wrong
            crit_message = '{} has been migrated, but it still cannot be imported.\n{}'.format(archive, exception)
            echo.echo_critical(crit_message)
        else:
            # Migration has not yet been tried.
            if migration:
                # Confirm migration
                echo.echo_warning(str(exception).splitlines()[0])
                if non_interactive:
                    migrate_archive = True
                else:
                    migrate_archive = click.confirm(
                        'Do you want to try and migrate {} to the newest export file version?\n'
                        'Note: This will not change your current file.'.format(archive),
                        default=True,
                        abort=True
                    )
            else:
                # Abort
                echo.echo_critical(str(exception))
    except Exception:
        echo.echo_error('an exception occurred while importing the archive {}'.format(archive))
        echo.echo(traceback.format_exc())
        if not non_interactive:
            click.confirm('do you want to continue?', abort=True)
    else:
        echo.echo_success('imported archive {}'.format(archive))

    return migrate_archive
Example #14
def print_list_res(qb_query, show_owner):
    """Print list of codes."""
    # pylint: disable=invalid-name
    if qb_query.count() > 0:  # `count` is a method on QueryBuilder
        for tuple_ in qb_query.all():
            if len(tuple_) == 3:
                (pk, label, useremail) = tuple_
                computername = None
            elif len(tuple_) == 4:
                (pk, label, useremail, computername) = tuple_
            else:
                echo.echo_warning("Wrong tuple size")
                return

            if show_owner:
                owner_string = " ({})".format(useremail)
            else:
                owner_string = ""
            if computername is None:
                computernamestring = ""
            else:
                computernamestring = "@{}".format(computername)
            echo.echo("* pk {} - {}{}{}".format(pk, label, computernamestring,
                                                owner_string))
    else:
        echo.echo("# No codes found matching the specified criteria.")
Example #15
def cif_content(data):
    """Show the content of the CIF file."""
    for node in data:
        try:
            echo.echo(node.get_content())
        except IOError as exception:
            echo.echo_warning(
                'could not read the content for CifData<{}>: {}'.format(
                    node.pk, str(exception)))
Example #16
def dump_pseudo(sym, name, tags, output_format, data):
    """
    Print specified Pseudopotentials
    """

    from aiida.orm.querybuilder import QueryBuilder

    from aiida_gaussian_datatypes.pseudopotential.data import Pseudopotential

    writers = {
        "cp2k": Pseudopotential.to_cp2k,
    }

    if data:
        # if explicit nodes were given, the only thing left is to make sure no filters are present
        if sym or name or tags:
            raise click.UsageError(
                "can not specify node IDs and filters at the same time")
    else:
        query = QueryBuilder()
        query.append(Pseudopotential, project=["*"])

        if sym:
            query.add_filter(Pseudopotential,
                             {"attributes.element": {
                                 "==": sym
                             }})

        if name:
            query.add_filter(Pseudopotential,
                             {"attributes.aliases": {
                                 "contains": [name]
                             }})

        if tags:
            query.add_filter(Pseudopotential,
                             {"attributes.tags": {
                                 "contains": tags
                             }})

        if not query.count():
            echo.echo_warning("No Gaussian Pseudopotential found.",
                              err=echo.is_stdout_redirected())
            return

        # the query always returns tuples; unpack them here
        data = [pseudo for pseudo, in query.iterall()]

    for pseudo in data:
        if echo.is_stdout_redirected():
            echo.echo_report("Dumping {}/{} ({})...".format(
                pseudo.name, pseudo.element, pseudo.uuid),
                             err=True)

        writers[output_format](pseudo, sys.stdout)
Example #17
def downgrade():
    """drop the hashes also when downgrading"""
    conn = op.get_bind()

    # Invalidate all the hashes & inform the user
    echo_warning(
        'Invalidating all the hashes of all the nodes. Please run verdi rehash',
        bold=True)
    statement = text("""UPDATE db_dbnode SET extras = extras #- '{""" +
                     _HASH_EXTRA_KEY + """}'::text[];""")
    conn.execute(statement)
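
For reference, _HASH_EXTRA_KEY is '_aiida_hash' in aiida-core, so the statement above renders to the following SQL, which strips that key from every node's extras via Postgres's jsonb #- operator:

UPDATE db_dbnode SET extras = extras #- '{_aiida_hash}'::text[];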
Example #18
    def handle_invalid(self, message):
        """Handle an incoming invalid configuration dictionary.

        The current content of the configuration file will be written to a backup file.

        :param message: a message describing the infraction, to be echoed as a warning
        """
        from aiida.cmdline.utils import echo
        filepath_backup = self._backup(self.filepath)
        echo.echo_warning(message)
        echo.echo_warning(f'backup of the original config file written to: `{filepath_backup}`')
Example #19
def group_remove_nodes(group, nodes, clear, force):
    """Remove nodes from a group."""
    from aiida.orm import QueryBuilder, Group, Node

    label = group.label
    klass = group.__class__.__name__

    if nodes and clear:
        echo.echo_critical(
            'Specify either the `--clear` flag to remove all nodes or the identifiers of the nodes you want to remove.'
        )

    if not force:

        if nodes:
            node_pks = [node.pk for node in nodes]

            query = QueryBuilder()
            query.append(Group, filters={'id': group.pk}, tag='group')
            query.append(Node,
                         with_group='group',
                         filters={'id': {
                             'in': node_pks
                         }},
                         project='id')

            group_node_pks = query.all(flat=True)

            if not group_node_pks:
                echo.echo_critical(
                    f'None of the specified nodes are in {klass}<{label}>.')

            if len(node_pks) > len(group_node_pks):
                node_pks = set(node_pks).difference(set(group_node_pks))
                echo.echo_warning(
                    f'{len(node_pks)} nodes with PK {node_pks} are not in {klass}<{label}>.'
                )

            message = f'Are you sure you want to remove {len(group_node_pks)} nodes from {klass}<{label}>?'

        elif clear:
            message = f'Are you sure you want to remove ALL the nodes from {klass}<{label}>?'
        else:
            echo.echo_critical(
                f'No nodes were provided for removal from {klass}<{label}>.')

        click.confirm(message, abort=True)

    if clear:
        group.clear()
    else:
        group.remove_nodes(nodes)
Example #20
def user_list():
    """Show a list of all users."""
    from aiida.orm import User

    default_user = User.objects.get_default()

    if default_user is None:
        echo.echo_warning('no default user has been configured')

    attributes = ['email', 'first_name', 'last_name']
    sort = lambda user: user.email
    highlight = lambda x: x.email == default_user.email if default_user else None
    echo.echo_formatted_list(User.objects.all(), attributes, sort=sort, highlight=highlight)
Example #21
    def structure_similarity(matcher, structure_i, structure_j):
        """Return whether structure_i and structure_j are similar according to the given matcher.

        :param matcher: instance of pymatgen.StructureMatcher
        :param structure_i: instance of StructureData
        :param structure_j: instance of StructureData
        :return: integer, 1 if the matcher deems the structures equal, 0 otherwise
        """
        try:
            return int(matcher.fit(structure_i.get_pymatgen_structure(), structure_j.get_pymatgen_structure()))
        except TypeError:
            if verbose:
                echo.echo_warning('could not match the structures {} and {}'.format(structure_i.uuid, structure_j.uuid))
            return 0
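
A minimal sketch of the matcher this helper expects; pymatgen is assumed to be installed and the tolerances shown are pymatgen's documented defaults (structure_a and structure_b are illustrative StructureData nodes):

from pymatgen.analysis.structure_matcher import StructureMatcher

matcher = StructureMatcher(ltol=0.2, stol=0.3, angle_tol=5)
print(structure_similarity(matcher, structure_a, structure_b))  # 1 if similar, else 0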
Example #22
def dump_basisset(sym, name, tags, output_format, data):
    """
    Print specified Basis Sets
    """

    from aiida.orm.querybuilder import QueryBuilder

    from aiida_gaussian_datatypes.basisset.data import BasisSet

    writers = {
        "cp2k": BasisSet.to_cp2k,
    }

    if data:
        # if explicit nodes were given, the only thing left is to make sure no filters are present
        if sym or name or tags:
            raise click.UsageError(
                "can not specify node IDs and filters at the same time")
    else:
        query = QueryBuilder()
        query.append(BasisSet, project=['*'])

        if sym:
            query.add_filter(BasisSet, {'attributes.element': {'==': sym}})

        if name:
            query.add_filter(BasisSet,
                             {'attributes.aliases': {
                                 'contains': [name]
                             }})

        if tags:
            query.add_filter(BasisSet, {'attributes.tags': {'contains': tags}})

        if not query.count():
            echo.echo_warning("No Gaussian Basis Sets found.",
                              err=echo.is_stdout_redirected())
            return

        # the query always returns tuples; unpack them here
        data = [bset for bset, in query.iterall()]

    for bset in data:
        if echo.is_stdout_redirected():
            echo.echo_report("Dumping {}/{} ({})...".format(
                bset.name, bset.element, bset.uuid),
                             err=True)

        writers[output_format](bset, sys.stdout)
Example #23
def drop_hashes(conn):  # pylint: disable=unused-argument
    """Drop hashes of nodes.

    Print warning only if the DB actually contains nodes.
    """
    n_nodes = conn.execute(
        text("""SELECT count(*) FROM db_dbnode;""")).fetchall()[0][0]
    if n_nodes > 0:
        echo.echo_warning(
            'Invalidating the hashes of all nodes. Please run "verdi rehash".',
            bold=True)

    statement = text("""UPDATE db_dbnode SET extras = extras #- '{""" +
                     _HASH_EXTRA_KEY + """}'::text[];""")
    conn.execute(statement)
Example #24
def upf_exportfamily(folder, group):
    """
    Export a pseudopotential family into a folder.
    Call without parameters to get some help.
    """
    if group.is_empty:
        echo.echo_critical('Group<{}> contains no pseudos'.format(group.label))

    for node in group.nodes:
        dest_path = os.path.join(folder, node.filename)
        if not os.path.isfile(dest_path):
            with io.open(dest_path, 'w', encoding='utf8') as handle:
                handle.write(node.get_content())
        else:
            echo.echo_warning('File {} is already present in the destination folder'.format(node.filename))
Example #25
def _echo_exception(msg: str, exception, warn_only: bool = False):
    """Correctly report and exception.

    :param msg: The message prefix
    :param exception: the exception raised
    :param warn_only: If True only print a warning, otherwise calls sys.exit with a non-zero exit status

    """
    from aiida.tools.importexport import IMPORT_LOGGER
    message = f'{msg}: {exception.__class__.__name__}: {str(exception)}'
    if warn_only:
        echo.echo_warning(message)
    else:
        IMPORT_LOGGER.debug('%s', traceback.format_exc())
        echo.echo_critical(message)
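
A hypothetical call site, where perform_import and filepath are illustrative names:

try:
    perform_import(filepath)  # hypothetical operation that may raise
except Exception as exception:  # pylint: disable=broad-except
    _echo_exception(f'problem importing {filepath}', exception, warn_only=True)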
Example #26
def psml_listfamilies(elements, with_description):
    """
    List all PSML families that exist in the database.

    THIS COMMAND IS DEPRECATED AND WILL BE REMOVED IN aiida-siesta v2.0. Its substitute command is `aiida-pseudo
    list`. Since the pseudo management is now based on a new system, the families listed here will not appear
    running the new command. It is suggested to export the families into a folder (`verdi data psml exportfamily
    folder_name family_label`), delete the group corresponding to the family (`verdi group delete family_label`),
    and install the family again (`aiida-pseudo install family folder_name family_label -P pseudo.psml`).
    """
    from aiida import orm
    from aiida.plugins import DataFactory
    from aiida_siesta.groups.pseudos import PsmlFamily

    PsmlData = DataFactory('siesta.psml')  # pylint: disable=invalid-name
    query = orm.QueryBuilder()
    query.append(PsmlData, tag='psmldata')
    if elements is not None:
        query.add_filter(PsmlData, {'attributes.element': {'in': elements}})
    query.append(PsmlFamily,
                 with_node='psmldata',
                 tag='group',
                 project=['label', 'description'])

    query.distinct()
    if query.count() > 0:
        for res in query.dict():
            group_label = res.get('group').get('label')
            group_desc = res.get('group').get('description')
            query = orm.QueryBuilder()
            query.append(orm.Group,
                         tag='thisgroup',
                         filters={'label': {
                             'like': group_label
                         }})
            query.append(PsmlData, project=['id'], with_group='thisgroup')

            if with_description:
                description_string = ': {}'.format(group_desc)
            else:
                description_string = ''

            echo.echo_success('* {} [{} pseudos]{}'.format(
                group_label, query.count(), description_string))

    else:
        echo.echo_warning('No valid PSML pseudopotential family found.')
Example #27
def group_copy(source_group, destination_group):
    """Duplicate a group.

    More in detail, add all nodes from the source group to the destination group.
    Note that the destination group need not already exist; it will be created if necessary."""
    from aiida import orm

    dest_group, created = orm.Group.objects.get_or_create(label=destination_group)

    # Issue warning if destination group is not empty and get user confirmation to continue
    if not created and not dest_group.is_empty:
        echo.echo_warning('Destination group<{}> already exists and is not empty.'.format(dest_group.label))
        click.confirm('Do you wish to continue anyway?', abort=True)

    # Copy nodes
    dest_group.add_nodes(list(source_group.nodes))
    echo.echo_success('Nodes copied from group<{}> to group<{}>'.format(source_group.label, dest_group.label))
Example #28
def export_workflow_data(apps, _):
    """Export existing legacy workflow data to a JSON file."""
    from tempfile import NamedTemporaryFile

    DbWorkflow = apps.get_model('db', 'DbWorkflow')
    DbWorkflowData = apps.get_model('db', 'DbWorkflowData')
    DbWorkflowStep = apps.get_model('db', 'DbWorkflowStep')

    count_workflow = DbWorkflow.objects.count()
    count_workflow_data = DbWorkflowData.objects.count()
    count_workflow_step = DbWorkflowStep.objects.count()

    # Nothing to do if all tables are empty
    if count_workflow == 0 and count_workflow_data == 0 and count_workflow_step == 0:
        return

    if not configuration.PROFILE.is_test_profile:
        echo.echo('\n')
        echo.echo_warning(
            'The legacy workflow tables contain data but will have to be dropped to continue.'
        )
        echo.echo_warning(
            'If you continue, the content will be dumped to a JSON file, before dropping the tables.'
        )
        echo.echo_warning(
            'This serves merely as a reference and cannot be used to restore the database.'
        )
        echo.echo_warning(
            'If you want a proper backup, make sure to dump the full database and back up your repository.'
        )
        if not click.confirm('Are you sure you want to continue?',
                             default=True):
            sys.exit(1)

    delete_on_close = configuration.PROFILE.is_test_profile

    data = {
        'workflow':
        serializers.serialize('json', DbWorkflow.objects.all()),
        'workflow_data':
        serializers.serialize('json', DbWorkflowData.objects.all()),
        'workflow_step':
        serializers.serialize('json', DbWorkflowStep.objects.all()),
    }

    # the stdlib `json.dump` writes text, so the file must be opened in text mode
    with NamedTemporaryFile(prefix='legacy-workflows',
                            suffix='.json',
                            dir='.',
                            delete=delete_on_close,
                            mode='w') as handle:
        filename = handle.name
        json.dump(data, handle)

    # If delete_on_close is False, we are running for the user and add additional message of file location
    if not delete_on_close:
        echo.echo_info(f'Exported workflow data to {filename}')
Example #29
def listfamilies(elements, with_description):
    """
    Print the list of installed UPF families.
    """
    from aiida.orm import DataFactory
    from aiida.orm.data.upf import UPFGROUP_TYPE

    # pylint: disable=invalid-name
    UpfData = DataFactory('upf')
    from aiida.orm.querybuilder import QueryBuilder
    from aiida.orm.group import Group
    qb = QueryBuilder()
    qb.append(UpfData, tag='upfdata')
    if elements is not None:
        qb.add_filter(UpfData, {'attributes.element': {'in': elements}})
    qb.append(Group,
              group_of='upfdata',
              tag='group',
              project=["name", "description"],
              filters={"type": {
                  '==': UPFGROUP_TYPE
              }})

    qb.distinct()
    if qb.count() > 0:
        for res in qb.dict():
            group_name = res.get("group").get("name")
            group_desc = res.get("group").get("description")
            qb = QueryBuilder()
            qb.append(Group,
                      tag='thisgroup',
                      filters={"name": {
                          'like': group_name
                      }})
            qb.append(UpfData, project=["id"], member_of='thisgroup')

            if with_description:
                description_string = ": {}".format(group_desc)
            else:
                description_string = ""

            echo.echo_success("* {} [{} pseudos]{}".format(
                group_name, qb.count(), description_string))

    else:
        echo.echo_warning("No valid UPF pseudopotential family found.")
Example #30
def devel_check_undesired_imports():
    """Check that verdi does not import python modules it shouldn't.

    Note: The blacklist was taken from the list of packages in the 'atomic_tools' extra but can be extended.
    """
    loaded_modules = 0

    for modulename in [
            'seekpath', 'CifFile', 'ase', 'pymatgen', 'spglib', 'pymysql'
    ]:
        if modulename in sys.modules:
            echo.echo_warning(f'Detected loaded module "{modulename}"')
            loaded_modules += 1

    if loaded_modules > 0:
        echo.echo_critical(f'Detected {loaded_modules} unwanted modules')
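    # `echo_critical` exits with a non-zero status, so the success message below is
    # only reached when no unwanted modules were detected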
    echo.echo_success('no issues detected')