Example #1
def migrate(input_file, output_file, force, silent, in_place, archive_format,
            version, verbosity):
    """Migrate an export archive to a more recent format version.

    .. deprecated:: 1.5.0
        Support for the --silent flag, replaced by --verbosity

    """
    from aiida.common.log import override_log_formatter_context
    from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter
    from aiida.tools.importexport import detect_archive_type, EXPORT_VERSION
    from aiida.tools.importexport.archive.migrators import get_migrator, MIGRATE_LOGGER

    if silent:
        echo.echo_deprecated(
            'the --silent option is deprecated, use --verbosity')

    if in_place:
        if output_file:
            echo.echo_critical(
                'output file specified together with --in-place flag')
        output_file = input_file
        force = True
    elif not output_file:
        echo.echo_critical(
            'no output file specified. Please add --in-place flag if you would like to migrate in place.'
        )

    if verbosity in ['DEBUG', 'INFO']:
        set_progress_bar_tqdm(leave=(verbosity == 'DEBUG'))
    else:
        set_progress_reporter(None)
    MIGRATE_LOGGER.setLevel(verbosity)

    if version is None:
        version = EXPORT_VERSION

    migrator_cls = get_migrator(detect_archive_type(input_file))
    migrator = migrator_cls(input_file)

    try:
        with override_log_formatter_context('%(message)s'):
            migrator.migrate(version,
                             output_file,
                             force=force,
                             out_compression=archive_format)
    except Exception as error:  # pylint: disable=broad-except
        if verbosity == 'DEBUG':
            raise
        echo.echo_critical(
            'failed to migrate the archive file (use `--verbosity DEBUG` to see traceback): '
            f'{error.__class__.__name__}:{error}')

    if verbosity in ['DEBUG', 'INFO']:
        echo.echo_success(f'migrated the archive to version {version}')
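
The same migration can be driven programmatically. The sketch below is a minimal example based only on the calls used above (detect_archive_type, get_migrator, EXPORT_VERSION and migrator.migrate); the helper name migrate_archive is hypothetical, and out_compression='zip' is assumed to be an accepted compression choice.

from aiida.tools.importexport import detect_archive_type, EXPORT_VERSION
from aiida.tools.importexport.archive.migrators import get_migrator


def migrate_archive(input_file, output_file, version=EXPORT_VERSION):
    """Hypothetical helper: migrate `input_file` to `output_file` at the given format version."""
    # Pick the migrator class matching the detected archive type (zip or tar).
    migrator_cls = get_migrator(detect_archive_type(input_file))
    migrator = migrator_cls(input_file)
    # force=True overwrites an existing output file, mirroring the --in-place branch above.
    migrator.migrate(version, output_file, force=True, out_compression='zip')
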
Example #2
def cmd_import(
    ctx, archives, webpages, group, extras_mode_existing, extras_mode_new, comment_mode, migration, non_interactive,
    verbosity
):
    """Import data from an AiiDA archive file.

    The archive can be specified by its relative or absolute file path, or its HTTP URL.
    """
    # pylint: disable=unused-argument
    from aiida.common.log import override_log_formatter_context
    from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter
    from aiida.tools.importexport.dbimport.utils import IMPORT_LOGGER
    from aiida.tools.importexport.archive.migrators import MIGRATE_LOGGER

    if verbosity in ['DEBUG', 'INFO']:
        set_progress_bar_tqdm(leave=(verbosity == 'DEBUG'))
    else:
        set_progress_reporter(None)
    IMPORT_LOGGER.setLevel(verbosity)
    MIGRATE_LOGGER.setLevel(verbosity)

    all_archives = _gather_imports(archives, webpages)

    # Preliminary sanity check
    if not all_archives:
        echo.echo_critical('no valid exported archives were found')

    # Shared import key-word arguments
    import_kwargs = {
        'group': group,
        'extras_mode_existing': ExtrasImportCode[extras_mode_existing].value,
        'extras_mode_new': extras_mode_new,
        'comment_mode': comment_mode,
    }

    with override_log_formatter_context('%(message)s'):
        for archive, web_based in all_archives:
            _import_archive(archive, web_based, import_kwargs, migration)
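
The verbosity handling at the top of this command repeats the pattern from Example #1. A minimal sketch that factors it into a reusable helper (the function name configure_verbosity is hypothetical; the calls are exactly the ones used above):

from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter


def configure_verbosity(verbosity, *loggers):
    """Hypothetical helper: map a verbosity level onto the progress reporter and loggers."""
    if verbosity in ('DEBUG', 'INFO'):
        # Show a tqdm progress bar; keep it on screen afterwards only at DEBUG.
        set_progress_bar_tqdm(leave=(verbosity == 'DEBUG'))
    else:
        # Disable the progress bar entirely for quieter levels.
        set_progress_reporter(None)
    for logger in loggers:
        logger.setLevel(verbosity)

With this helper, the command body would start with configure_verbosity(verbosity, IMPORT_LOGGER, MIGRATE_LOGGER).
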
Example #3
def create(output_file, codes, computers, groups, nodes, archive_format, force,
           input_calc_forward, input_work_forward, create_backward,
           return_backward, call_calc_backward, call_work_backward,
           include_comments, include_logs, verbosity):
    """
    Export subsets of the provenance graph to file for sharing.

    Besides Nodes of the provenance graph, you can export Groups, Codes, Computers, Comments and Logs.

    By default, the archive file will include not only the entities explicitly provided via the command line but also
    their provenance, according to the rules outlined in the documentation.
    You can modify some of those rules using options of this command.
    """
    # pylint: disable=too-many-branches
    from aiida.common.log import override_log_formatter_context
    from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter
    from aiida.tools.importexport import export, ExportFileFormat, EXPORT_LOGGER
    from aiida.tools.importexport.common.exceptions import ArchiveExportError

    entities = []

    if codes:
        entities.extend(codes)

    if computers:
        entities.extend(computers)

    if groups:
        entities.extend(groups)

    if nodes:
        entities.extend(nodes)

    kwargs = {
        'input_calc_forward': input_calc_forward,
        'input_work_forward': input_work_forward,
        'create_backward': create_backward,
        'return_backward': return_backward,
        'call_calc_backward': call_calc_backward,
        'call_work_backward': call_work_backward,
        'include_comments': include_comments,
        'include_logs': include_logs,
        'overwrite': force,
    }

    if archive_format == 'zip':
        export_format = ExportFileFormat.ZIP
        kwargs.update({'writer_init': {'use_compression': True}})
    elif archive_format == 'zip-uncompressed':
        export_format = ExportFileFormat.ZIP
        kwargs.update({'writer_init': {'use_compression': False}})
    elif archive_format == 'zip-lowmemory':
        export_format = ExportFileFormat.ZIP
        kwargs.update({'writer_init': {'cache_zipinfo': True}})
    elif archive_format == 'tar.gz':
        export_format = ExportFileFormat.TAR_GZIPPED
    elif archive_format == 'null':
        export_format = 'null'

    if verbosity in ['DEBUG', 'INFO']:
        set_progress_bar_tqdm(leave=(verbosity == 'DEBUG'))
    else:
        set_progress_reporter(None)
    EXPORT_LOGGER.setLevel(verbosity)

    try:
        with override_log_formatter_context('%(message)s'):
            export(entities,
                   filename=output_file,
                   file_format=export_format,
                   **kwargs)
    except ArchiveExportError as exception:
        echo.echo_critical(
            f'failed to write the archive file. Exception: {exception}')
    else:
        echo.echo_success(f'wrote the export archive file to {output_file}')
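
For completeness, a minimal sketch of calling export directly with the same keyword arguments the command assembles; the node PKs and output file name are hypothetical, and loading entities by PK is only one possible way to obtain the inputs.

from aiida import orm
from aiida.tools.importexport import export, ExportFileFormat

# Hypothetical inputs: two nodes loaded by PK, exported to a compressed zip archive.
entities = [orm.load_node(pk) for pk in (123, 456)]
export(
    entities,
    filename='provenance.aiida',
    file_format=ExportFileFormat.ZIP,
    include_comments=True,
    include_logs=True,
    writer_init={'use_compression': True},
)
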
Example #4
def extract_zip(infile,
                folder,
                nodes_export_subfolder=None,
                check_files=('data.json', 'metadata.json'),
                **kwargs):
    """Extract the nodes to be imported from a zip file.

    :param infile: file path
    :type infile: str

    :param folder: a temporary folder used to extract the file tree
    :type folder: :py:class:`~aiida.common.folders.SandboxFolder`

    :param nodes_export_subfolder: name of the subfolder for AiiDA nodes
    :type nodes_export_subfolder: str

    :param check_files: names of files whose presence in the archive is verified

    :param silent: suppress progress bar
    :type silent: bool

    :raises TypeError: if parameter types are not respected
    :raises `~aiida.tools.importexport.common.exceptions.CorruptArchive`: if the archive is missing required files
        or contains files in an incorrect format
    """
    warnings.warn(
        'extract_zip function is deprecated and will be removed in AiiDA v2.0.0, '
        'use extract_tree in the archive-path package instead',
        AiidaDeprecationWarning)  # pylint: disable=no-member

    if nodes_export_subfolder:
        if not isinstance(nodes_export_subfolder, str):
            raise TypeError('nodes_export_subfolder must be a string')
    else:
        nodes_export_subfolder = NODES_EXPORT_SUBFOLDER

    if not kwargs.get('silent', False):
        set_progress_bar_tqdm(unit='files')
    else:
        set_progress_reporter(None)

    data_files = set()

    try:
        with zipfile.ZipFile(infile, 'r', allowZip64=True) as handle:

            members = handle.namelist()

            if not members:
                raise CorruptArchive('no files detected in archive')

            with get_progress_reporter()(total=len(members)) as progress:

                for membername in members:

                    progress.update()

                    # Check that we only extract nodes located within the expected subfolder.
                    # A stricter check would also reject paths containing '..'
                    # components (for example via the folder limit checks).
                    if membername in check_files:
                        data_files.add(membername)
                    elif not membername.startswith(nodes_export_subfolder +
                                                   os.sep):
                        continue

                    _update_description(membername, progress)

                    handle.extract(path=folder.abspath, member=membername)

    except zipfile.BadZipfile:
        raise ValueError(
            'The input file format for import is not valid (not a zip file)')

    for name in check_files:
        if name not in data_files:
            raise CorruptArchive(f'Archive missing required file: {name}')
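
A minimal usage sketch for extract_zip, assuming AiiDA's SandboxFolder as the temporary extraction target; the archive path is a placeholder.

from aiida.common.folders import SandboxFolder

with SandboxFolder() as folder:
    # Extract without a progress bar; 'export.aiida' is a hypothetical archive path.
    extract_zip('export.aiida', folder, silent=True)
    # data.json, metadata.json and the nodes subfolder are now under folder.abspath.
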
Example #5
def extract_tar(infile,
                folder,
                nodes_export_subfolder=None,
                check_files=('data.json', 'metadata.json'),
                **kwargs):
    """
    Extract the nodes to be imported from a (possibly compressed) tar file.

    :param infile: file path
    :type infile: str

    :param folder: a temporary folder used to extract the file tree
    :type folder: :py:class:`~aiida.common.folders.SandboxFolder`

    :param nodes_export_subfolder: name of the subfolder for AiiDA nodes
    :type nodes_export_subfolder: str

    :param check_files: names of files whose presence in the archive is verified

    :param silent: suppress progress bar
    :type silent: bool

    :raises TypeError: if parameter types are not respected
    :raises `~aiida.tools.importexport.common.exceptions.CorruptArchive`: if the archive is missing required files
        or contains files in an incorrect format
    """
    warnings.warn(
        'extract_tar function is deprecated and will be removed in AiiDA v2.0.0, '
        'use extract_tree in the archive-path package instead',
        AiidaDeprecationWarning)  # pylint: disable=no-member

    if nodes_export_subfolder:
        if not isinstance(nodes_export_subfolder, str):
            raise TypeError('nodes_export_subfolder must be a string')
    else:
        nodes_export_subfolder = NODES_EXPORT_SUBFOLDER

    if not kwargs.get('silent', False):
        set_progress_bar_tqdm(unit='files')
    else:
        set_progress_reporter(None)

    data_files = set()

    try:
        with tarfile.open(infile, 'r:*', format=tarfile.PAX_FORMAT) as handle:

            members = handle.getmembers()

            if len(members) == 1 and members[0].size == 0:
                raise CorruptArchive('no files detected in archive')

            with get_progress_reporter()(total=len(members)) as progress:

                for member in members:

                    progress.update()

                    if member.isdev():
                        # safety: skip if character device, block device or FIFO
                        print(
                            f'WARNING, device found inside the import file: {member.name}',
                            file=sys.stderr)
                        continue
                    if member.issym() or member.islnk():
                        # safety: exports are written with dereference=True, so
                        # there should be no symbolic or hard links.
                        print(
                            f'WARNING, symlink found inside the import file: {member.name}',
                            file=sys.stderr)
                        continue
                    # Check that we only extract nodes located within the expected subfolder.
                    # A stricter check would also reject paths containing '..'
                    # components (for example via the folder limit checks).
                    if member.name in check_files:
                        data_files.add(member.name)
                    elif not member.name.startswith(nodes_export_subfolder +
                                                    os.sep):
                        continue

                    _update_description(member.name, progress)

                    handle.extract(path=folder.abspath, member=member)
    except tarfile.ReadError:
        raise ValueError(
            'The input file format for import is not valid (not a tar file)')

    for name in check_files:
        if name not in data_files:
            raise CorruptArchive(f'Archive missing required file: {name}')
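
The member checks inside the loop can be read as a standalone safety predicate. The sketch below is a hypothetical refactoring that keeps the same logic; it is not part of AiiDA's API.

import tarfile


def is_safe_member(member: tarfile.TarInfo) -> bool:
    """Hypothetical helper: reject tar members that should never be extracted."""
    if member.isdev():
        # Character devices, block devices and FIFOs are skipped.
        return False
    if member.issym() or member.islnk():
        # Exports are written with dereference=True, so links are unexpected.
        return False
    return True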