Exemplo n.º 1
0
def migrate(input_file, output_file, force, silent, in_place, archive_format,
            version, verbosity):
    """Migrate an export archive to a more recent format version.

    .. deprecated:: 1.5.0
        Support for the --silent flag, replaced by --verbosity

    """
    from aiida.common.log import override_log_formatter_context
    from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter
    from aiida.tools.importexport import detect_archive_type, EXPORT_VERSION
    from aiida.tools.importexport.archive.migrators import get_migrator, MIGRATE_LOGGER

    # Idiomatic truthiness check: `silent` is a boolean CLI flag, so testing
    # identity with `is True` is unnecessary.
    if silent:
        echo.echo_deprecated(
            'the --silent option is deprecated, use --verbosity')

    # --in-place writes back to the input file and implies --force; it is
    # mutually exclusive with an explicit output file.
    if in_place:
        if output_file:
            echo.echo_critical(
                'output file specified together with --in-place flag')
        output_file = input_file
        force = True
    elif not output_file:
        echo.echo_critical(
            'no output file specified. Please add --in-place flag if you would like to migrate in place.'
        )

    # Show a progress bar only at the chattier verbosity levels; the bar is
    # kept on screen after completion only for DEBUG.
    if verbosity in ['DEBUG', 'INFO']:
        set_progress_bar_tqdm(leave=(verbosity == 'DEBUG'))
    else:
        set_progress_reporter(None)
    MIGRATE_LOGGER.setLevel(verbosity)

    # Default to the most recent export format version.
    if version is None:
        version = EXPORT_VERSION

    migrator_cls = get_migrator(detect_archive_type(input_file))
    migrator = migrator_cls(input_file)

    try:
        with override_log_formatter_context('%(message)s'):
            migrator.migrate(version,
                             output_file,
                             force=force,
                             out_compression=archive_format)
    except Exception as error:  # pylint: disable=broad-except
        # Hide the traceback unless the user explicitly asked for DEBUG output.
        if verbosity == 'DEBUG':
            raise
        echo.echo_critical(
            'failed to migrate the archive file (use `--verbosity DEBUG` to see traceback): '
            f'{error.__class__.__name__}:{error}')

    if verbosity in ['DEBUG', 'INFO']:
        echo.echo_success(f'migrated the archive to version {version}')
Exemplo n.º 2
0
def group_delete(group, clear, delete_nodes, dry_run, force, verbose,
                 **traversal_rules):
    """Delete a group and (optionally) the nodes it contains."""
    from aiida.common.log import override_log_formatter_context
    from aiida.tools import delete_group_nodes, DELETE_LOGGER
    from aiida import orm

    # `--clear` is kept only for backwards compatibility and does nothing.
    if clear:
        warnings.warn('`--clear` is deprecated and no longer has any effect.',
                      AiidaDeprecationWarning)  # pylint: disable=no-member

    label, klass = group.label, group.__class__.__name__

    DELETE_LOGGER.setLevel(logging.DEBUG if verbose else logging.INFO)

    # Either announce what would happen (dry run), skip prompting (force),
    # or ask the user for confirmation before deleting anything.
    if force or dry_run:
        if dry_run:
            echo.echo_info(f'Would have deleted {klass}<{label}>.')
    else:
        click.confirm(f'Are you sure to delete {klass}<{label}>?', abort=True)

    if delete_nodes:

        def _dry_run_callback(node_pks):
            # Returning False tells the deleter to go ahead; prompting is
            # skipped entirely when --force was given or nothing would be
            # deleted.
            if force or not node_pks:
                return False
            echo.echo_warning(
                f'YOU ARE ABOUT TO DELETE {len(node_pks)} NODES! THIS CANNOT BE UNDONE!'
            )
            return not click.confirm('Shall I continue?', abort=True)

        with override_log_formatter_context('%(message)s'):
            _, nodes_deleted = delete_group_nodes([group.pk],
                                                  dry_run=dry_run
                                                  or _dry_run_callback,
                                                  **traversal_rules)
        if not nodes_deleted:
            # don't delete the group if the nodes were not deleted
            return

    if not dry_run:
        orm.Group.objects.delete(group.pk)
        echo.echo_success(f'{klass}<{label}> deleted.')
Exemplo n.º 3
0
def cmd_import(
    ctx, archives, webpages, group, extras_mode_existing, extras_mode_new, comment_mode, migration, non_interactive,
    verbosity
):
    """Import data from an AiiDA archive file.

    The archive can be specified by its relative or absolute file path, or its HTTP URL.
    """
    # pylint: disable=unused-argument
    from aiida.common.log import override_log_formatter_context
    from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter
    from aiida.tools.importexport.dbimport.utils import IMPORT_LOGGER
    from aiida.tools.importexport.archive.migrators import MIGRATE_LOGGER

    # Progress bars are only shown for the chattier verbosity levels.
    if verbosity not in ['DEBUG', 'INFO']:
        set_progress_reporter(None)
    else:
        set_progress_bar_tqdm(leave=(verbosity == 'DEBUG'))
    for logger in (IMPORT_LOGGER, MIGRATE_LOGGER):
        logger.setLevel(verbosity)

    all_archives = _gather_imports(archives, webpages)

    # Bail out early if nothing valid was found to import.
    if not all_archives:
        echo.echo_critical('no valid exported archives were found')

    # Keyword arguments shared by every archive import below.
    import_kwargs = {
        'group': group,
        'extras_mode_existing': ExtrasImportCode[extras_mode_existing].value,
        'extras_mode_new': extras_mode_new,
        'comment_mode': comment_mode,
    }

    with override_log_formatter_context('%(message)s'):
        for archive, web_based in all_archives:
            _import_archive(archive, web_based, import_kwargs, migration)
Exemplo n.º 4
0
def node_delete(identifier, dry_run, verbose, force, **traversal_rules):
    """Delete nodes from the provenance graph.

    This will not only delete the nodes explicitly provided via the command line, but will also include
    the nodes necessary to keep a consistent graph, according to the rules outlined in the documentation.
    You can modify some of those rules using options of this command.
    """
    from aiida.common.log import override_log_formatter_context
    from aiida.orm.utils.loaders import NodeEntityLoader
    from aiida.tools import delete_nodes, DELETE_LOGGER

    DELETE_LOGGER.setLevel(logging.DEBUG if verbose else logging.INFO)

    def _resolve_pk(value):
        # Only hit the database when the identifier is a uuid/label rather
        # than an integer pk.
        try:
            return int(value)
        except ValueError:
            return NodeEntityLoader.load_entity(value).pk

    pks = [_resolve_pk(obj) for obj in identifier]

    def _dry_run_callback(node_pks):
        # Returning False lets the deletion proceed; skip the prompt when
        # --force was given or there is nothing to delete.
        if force or not node_pks:
            return False
        echo.echo_warning(
            f'YOU ARE ABOUT TO DELETE {len(node_pks)} NODES! THIS CANNOT BE UNDONE!'
        )
        return not click.confirm('Shall I continue?', abort=True)

    with override_log_formatter_context('%(message)s'):
        _, was_deleted = delete_nodes(pks,
                                      dry_run=dry_run or _dry_run_callback,
                                      **traversal_rules)

    if was_deleted:
        echo.echo_success('Finished deletion.')
Exemplo n.º 5
0
def create(output_file, codes, computers, groups, nodes, archive_format, force,
           input_calc_forward, input_work_forward, create_backward,
           return_backward, call_calc_backward, call_work_backward,
           include_comments, include_logs, verbosity):
    """
    Export subsets of the provenance graph to file for sharing.

    Besides Nodes of the provenance graph, you can export Groups, Codes, Computers, Comments and Logs.

    By default, the archive file will include not only the entities explicitly provided via the command line but also
    their provenance, according to the rules outlined in the documentation.
    You can modify some of those rules using options of this command.
    """
    # pylint: disable=too-many-branches
    from aiida.common.log import override_log_formatter_context
    from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter
    from aiida.tools.importexport import export, ExportFileFormat, EXPORT_LOGGER
    from aiida.tools.importexport.common.exceptions import ArchiveExportError

    # Collect every explicitly requested entity, preserving the original
    # ordering of the selections.
    entities = []
    for selection in (codes, computers, groups, nodes):
        if selection:
            entities.extend(selection)

    kwargs = {
        'input_calc_forward': input_calc_forward,
        'input_work_forward': input_work_forward,
        'create_backward': create_backward,
        'return_backward': return_backward,
        'call_calc_backward': call_calc_backward,
        'call_work_backward': call_work_backward,
        'include_comments': include_comments,
        'include_logs': include_logs,
        'overwrite': force,
    }

    # The three zip variants differ only in how the writer is initialised.
    zip_writer_init = {
        'zip': {'use_compression': True},
        'zip-uncompressed': {'use_compression': False},
        'zip-lowmemory': {'cache_zipinfo': True},
    }
    if archive_format in zip_writer_init:
        export_format = ExportFileFormat.ZIP
        kwargs.update({'writer_init': zip_writer_init[archive_format]})
    elif archive_format == 'tar.gz':
        export_format = ExportFileFormat.TAR_GZIPPED
    elif archive_format == 'null':
        export_format = 'null'

    # Progress bars are only shown for the chattier verbosity levels.
    if verbosity not in ['DEBUG', 'INFO']:
        set_progress_reporter(None)
    else:
        set_progress_bar_tqdm(leave=(verbosity == 'DEBUG'))
    EXPORT_LOGGER.setLevel(verbosity)

    try:
        with override_log_formatter_context('%(message)s'):
            export(entities,
                   filename=output_file,
                   file_format=export_format,
                   **kwargs)
    except ArchiveExportError as exception:
        echo.echo_critical(
            f'failed to write the archive file. Exception: {exception}')
    else:
        echo.echo_success(f'wrote the export archive file to {output_file}')