Exemplo n.º 1
0
    def _import_with_migrate(filename,
                             tempdir=temp_dir,
                             import_kwargs=None,
                             try_migration=True):
        """Import an AiiDA export archive, migrating it first if its version is incompatible.

        :param filename: path to the export archive to import
        :param tempdir: working directory for the migrator's intermediate files
        :param import_kwargs: keyword arguments forwarded to ``import_data``;
            defaults to ``_DEFAULT_IMPORT_KWARGS``
        :param try_migration: when True, migrate an incompatible archive to the
            current ``EXPORT_VERSION`` and retry the import; when False, the
            ``IncompatibleArchiveVersionError`` is re-raised
        """
        from click import echo
        from aiida.tools.importexport import import_data
        from aiida.tools.importexport import EXPORT_VERSION, IncompatibleArchiveVersionError
        # these are only available after aiida >= 1.5.0, maybe rely on verdi import instead
        from aiida.tools.importexport import detect_archive_type
        from aiida.tools.importexport.archive.migrators import get_migrator
        from aiida.tools.importexport.common.config import ExportFileFormat
        if import_kwargs is None:
            import_kwargs = _DEFAULT_IMPORT_KWARGS
        archive_path = filename

        try:
            import_data(archive_path, **import_kwargs)
        except IncompatibleArchiveVersionError:
            if not try_migration:
                # Previously the error was silently swallowed here; surface it
                # instead so the caller knows the import did not happen.
                raise
            echo(
                f'incompatible version detected for {archive_path}, trying migration'
            )
            migrator = get_migrator(
                detect_archive_type(archive_path))(archive_path)
            # Migrate to the current export version into ``tempdir`` and retry.
            archive_path = migrator.migrate(EXPORT_VERSION,
                                            None,
                                            out_compression='none',
                                            work_dir=tempdir)
            import_data(archive_path, **import_kwargs)
Exemplo n.º 2
0
    def test_partial_migrations(self, core_archive, tmp_path):
        """Test migrations from a specific version (0.3) to other versions."""
        archive_file = get_archive_file('export_v0.3_simple.aiida',
                                        **core_archive)

        # Sanity-check the starting version of the fixture archive.
        verify_metadata_version(
            read_json_files(archive_file, names=['metadata.json'])[0],
            version='0.3')

        migrator = get_migrator(detect_archive_type(archive_file))(archive_file)

        # Non-string versions are rejected outright.
        with pytest.raises(TypeError, match='version must be a string'):
            migrator.migrate(0.2, tmp_path / 'v02.aiida')

        # Downgrading is not supported.
        with pytest.raises(ArchiveMigrationError,
                           match='No migration pathway available'):
            migrator.migrate('0.2', tmp_path / 'v02.aiida')

        # Migrating to the version the archive already has is a no-op,
        # signalled by a None return value.
        assert migrator.migrate('0.3', tmp_path / 'v03.aiida') is None

        # Upgrading to a newer version writes a new archive file.
        migrator.migrate('0.5', tmp_path / 'v05.aiida')
        assert (tmp_path / 'v05.aiida').exists()

        verify_metadata_version(
            read_json_files(tmp_path / 'v05.aiida', names=['metadata.json'])[0],
            version='0.5')
Exemplo n.º 3
0
    def test_no_node_migration(self, tmp_path, external_archive):
        """Test migration of archive file that has no Node entities."""
        source_file = get_archive_file('export_v0.3_no_Nodes.aiida',
                                       **external_archive)
        target_file = tmp_path / 'output_file.aiida'

        # Migrate to the newest version and import the result.
        migrator = get_migrator(detect_archive_type(source_file))(source_file)
        migrator.migrate(newest_version, target_file)
        import_data(target_file)

        # The archive contains no nodes ...
        assert orm.QueryBuilder().append(orm.Node).count() == 0

        # ... but it does carry one known computer ...
        computer_uuids = orm.QueryBuilder().append(
            orm.Computer, project=['uuid']).all(flat=True)
        assert computer_uuids == ['4f33c6fd-b624-47df-9ffb-a58f05d323af']

        # ... and the imported user alongside the default one.
        user_emails = orm.QueryBuilder().append(orm.User,
                                                project=['email']).all(flat=True)
        assert set(user_emails) == {
            orm.User.objects.get_default().email, 'aiida@localhost'
        }
Exemplo n.º 4
0
def _import_archive(archive: str, web_based: bool, import_kwargs: dict, try_migration: bool):
    """Perform the archive import.

    :param archive: the path or URL to the archive
    :param web_based: If the archive needs to be downloaded first
    :param import_kwargs: keyword arguments to pass to the import function
    :param try_migration: whether to try a migration if the import raises IncompatibleArchiveVersionError

    """
    from aiida.common.folders import SandboxFolder
    from aiida.tools.importexport import (
        detect_archive_type, EXPORT_VERSION, import_data, IncompatibleArchiveVersionError
    )
    from aiida.tools.importexport.archive.migrators import get_migrator

    # Sandbox holds the downloaded copy and/or the migrated archive; it is
    # cleaned up automatically when the context exits.
    with SandboxFolder() as temp_folder:

        # Local path used for the actual import; replaced below when the
        # archive is downloaded or migrated.
        archive_path = archive

        if web_based:
            echo.echo_info(f'downloading archive: {archive}')
            try:
                response = urllib.request.urlopen(archive)
            except Exception as exception:
                # NOTE(review): presumably _echo_exception aborts the command,
                # otherwise `response` below would be unbound — confirm.
                _echo_exception(f'downloading archive {archive} failed', exception)
            temp_folder.create_file_from_filelike(response, 'downloaded_archive.zip')
            archive_path = temp_folder.get_abs_path('downloaded_archive.zip')
            echo.echo_success('archive downloaded, proceeding with import')

        echo.echo_info(f'starting import: {archive}')
        try:
            import_data(archive_path, **import_kwargs)
        except IncompatibleArchiveVersionError as exception:
            if try_migration:

                echo.echo_info(f'incompatible version detected for {archive}, trying migration')
                try:
                    migrator = get_migrator(detect_archive_type(archive_path))(archive_path)
                    # Migrate in the sandbox to the current export version,
                    # uncompressed; the returned path replaces the original.
                    archive_path = migrator.migrate(
                        EXPORT_VERSION, None, out_compression='none', work_dir=temp_folder.abspath
                    )
                except Exception as exception:
                    _echo_exception(f'an exception occurred while migrating the archive {archive}', exception)

                echo.echo_info('proceeding with import of migrated archive')
                try:
                    import_data(archive_path, **import_kwargs)
                except Exception as exception:
                    _echo_exception(
                        f'an exception occurred while trying to import the migrated archive {archive}', exception
                    )
            else:
                # Here `exception` is still the IncompatibleArchiveVersionError
                # bound by the outer except clause.
                _echo_exception(f'an exception occurred while trying to import the archive {archive}', exception)
        except Exception as exception:
            _echo_exception(f'an exception occurred while trying to import the archive {archive}', exception)

        echo.echo_success(f'imported archive {archive}')
Exemplo n.º 5
0
    def test_full_migration(self, tmp_path, core_archive):
        """Test a migration from the first to newest archive version."""

        archive_file = get_archive_file('export_v0.1_simple.aiida',
                                        **core_archive)

        # Sanity-check the starting version of the fixture archive.
        verify_metadata_version(
            read_json_files(archive_file, names=['metadata.json'])[0],
            version='0.1')

        migrator = get_migrator(detect_archive_type(archive_file))(archive_file)

        out_file = tmp_path / 'out.aiida'
        migrator.migrate(newest_version, out_file)

        # Default output compression is zip, and the metadata must now
        # report the newest format version.
        assert detect_archive_type(out_file) == 'zip'
        verify_metadata_version(
            read_json_files(out_file, names=['metadata.json'])[0],
            version=newest_version)
Exemplo n.º 6
0
def migrate(input_file, output_file, force, silent, in_place, archive_format,
            version, verbosity):
    """Migrate an export archive to a more recent format version.

    :param input_file: path of the archive to migrate
    :param output_file: destination path (mutually exclusive with ``in_place``)
    :param force: overwrite an existing output file
    :param silent: deprecated, replaced by ``verbosity``
    :param in_place: overwrite ``input_file`` with the migrated archive
    :param archive_format: output compression format
    :param version: target format version; defaults to the current ``EXPORT_VERSION``
    :param verbosity: logging level name, e.g. ``'DEBUG'`` or ``'INFO'``

    .. deprecated:: 1.5.0
        Support for the --silent flag, replaced by --verbosity

    """
    from aiida.common.log import override_log_formatter_context
    from aiida.common.progress_reporter import set_progress_bar_tqdm, set_progress_reporter
    from aiida.tools.importexport import detect_archive_type, EXPORT_VERSION
    from aiida.tools.importexport.archive.migrators import get_migrator, MIGRATE_LOGGER

    # The flag is a boolean; test truthiness idiomatically rather than `is True`.
    if silent:
        echo.echo_deprecated(
            'the --silent option is deprecated, use --verbosity')

    # --in-place overwrites the input archive, which implies force=True.
    if in_place:
        if output_file:
            echo.echo_critical(
                'output file specified together with --in-place flag')
        output_file = input_file
        force = True
    elif not output_file:
        echo.echo_critical(
            'no output file specified. Please add --in-place flag if you would like to migrate in place.'
        )

    # Progress bar only at verbose levels; keep it on screen only for DEBUG.
    if verbosity in ['DEBUG', 'INFO']:
        set_progress_bar_tqdm(leave=(verbosity == 'DEBUG'))
    else:
        set_progress_reporter(None)
    MIGRATE_LOGGER.setLevel(verbosity)

    # Default target is the current export format version.
    if version is None:
        version = EXPORT_VERSION

    migrator_cls = get_migrator(detect_archive_type(input_file))
    migrator = migrator_cls(input_file)

    try:
        with override_log_formatter_context('%(message)s'):
            migrator.migrate(version,
                             output_file,
                             force=force,
                             out_compression=archive_format)
    except Exception as error:  # pylint: disable=broad-except
        # In DEBUG mode propagate the full traceback; otherwise abort with a
        # concise error message.
        if verbosity == 'DEBUG':
            raise
        echo.echo_critical(
            'failed to migrate the archive file (use `--verbosity DEBUG` to see traceback): '
            f'{error.__class__.__name__}:{error}')

    if verbosity in ['DEBUG', 'INFO']:
        echo.echo_success(f'migrated the archive to version {version}')
Exemplo n.º 7
0
    def test_tar_migration(self, tmp_path, core_archive):
        """Test a migration using a tar compressed in/out file."""

        archive_file = get_archive_file('export_v0.2_simple.tar.gz',
                                        **core_archive)

        # Sanity-check the starting version of the fixture archive.
        verify_metadata_version(
            read_json_files(archive_file, names=['metadata.json'])[0],
            version='0.2')

        migrator = get_migrator(detect_archive_type(archive_file))(archive_file)

        out_file = tmp_path / 'out.aiida'
        migrator.migrate(newest_version, out_file, out_compression='tar.gz')

        # The requested tar.gz compression must be honoured and the metadata
        # must now report the newest format version.
        assert detect_archive_type(out_file) == 'tar.gz'
        verify_metadata_version(
            read_json_files(out_file, names=['metadata.json'])[0],
            version=newest_version)
Exemplo n.º 8
0
    def test_wrong_versions(self, core_archive, tmp_path, version):
        """Test correct errors are raised if archive files have wrong version numbers"""
        archive_file = get_archive_file('export_v0.1_simple.aiida',
                                        **core_archive)
        migrator = get_migrator(detect_archive_type(archive_file))(archive_file)

        out_file = tmp_path / 'out.aiida'
        with pytest.raises(ArchiveMigrationError,
                           match='No migration pathway available'):
            migrator.migrate(version, out_file)
        # A failed migration must not leave a partial output file behind.
        assert not out_file.exists()
Exemplo n.º 9
0
def inspect(archive, version, data, meta_data):
    """Inspect contents of an exported archive without importing it.

    By default a summary of the archive contents will be printed. The various options can be used to change exactly what
    information is displayed.

    .. deprecated:: 1.5.0
        Support for the --data flag

    """
    import dataclasses
    from aiida.tools.importexport import CorruptArchive, detect_archive_type, get_reader

    archive_reader_cls = get_reader(detect_archive_type(archive))

    with archive_reader_cls(archive) as archive_reader:
        try:
            if version:
                # Only print the archive's export format version.
                echo.echo(archive_reader.export_version)
            elif data:
                # data is an internal implementation detail
                echo.echo_deprecated(
                    '--data is deprecated and will be removed in v2.0.0')
                echo.echo_dictionary(archive_reader._get_data())  # pylint: disable=protected-access
            elif meta_data:
                echo.echo_dictionary(dataclasses.asdict(archive_reader.metadata))
            else:
                # Default: print a tabulated summary of versions and entity counts.
                summary = {
                    'Version aiida': archive_reader.metadata.aiida_version,
                    'Version format': archive_reader.metadata.export_version,
                }
                for label, entity in (('Computers', 'Computer'),
                                      ('Groups', 'Group')):
                    summary[label] = archive_reader.entity_count(entity)
                summary['Links'] = archive_reader.link_count
                for label, entity in (('Nodes', 'Node'), ('Users', 'User')):
                    summary[label] = archive_reader.entity_count(entity)
                if archive_reader.metadata.conversion_info:
                    summary['Conversion info'] = '\n'.join(
                        archive_reader.metadata.conversion_info)

                echo.echo(tabulate.tabulate(summary.items()))
        except CorruptArchive as exception:
            echo.echo_critical(f'corrupt archive: {exception}')
Exemplo n.º 10
0
    def test_migrate_to_newest(self, external_archive, tmp_path, filename,
                               nodes):
        """Test migrations from old archives to newest version.

        Parametrized over ``filename`` (the legacy archive fixture) and
        ``nodes`` (the expected node count after import).
        """
        filepath_archive = get_archive_file(filename, **external_archive)

        out_path = tmp_path / 'out.aiida'

        migrator_cls = get_migrator(detect_archive_type(filepath_archive))
        migrator = migrator_cls(filepath_archive)
        # migrate() returns None when the archive is already at the target
        # version; in that case fall back to the original file path.
        out_path = migrator.migrate(newest_version,
                                    out_path) or filepath_archive

        metadata = read_json_files(out_path, names=['metadata.json'])[0]
        verify_metadata_version(metadata, version=newest_version)

        # Load the migrated file
        import_data(out_path)

        # count nodes
        archive_node_count = orm.QueryBuilder().append(orm.Node).count()
        assert archive_node_count == nodes

        # Verify that CalculationNodes have non-empty attribute dictionaries
        calc_query = orm.QueryBuilder().append(orm.CalculationNode)
        for [calculation] in calc_query.iterall():
            assert isinstance(calculation.attributes, dict)
            assert len(calculation.attributes) > 0

        # Verify that the StructureData nodes maintained their (same) label, cell, and kinds
        struct_query = orm.QueryBuilder().append(orm.StructureData)
        assert struct_query.count() == 2
        for structure in struct_query.all(flat=True):
            assert structure.label == ''
            assert structure.cell == [[4, 0, 0], [0, 4, 0], [0, 0, 4]]

        # Expected kinds of the BaTiO3 structures in the fixture archives.
        known_kinds = [
            {
                'name': 'Ba',
                'mass': 137.327,
                'weights': [1],
                'symbols': ['Ba']
            },
            {
                'name': 'Ti',
                'mass': 47.867,
                'weights': [1],
                'symbols': ['Ti']
            },
            {
                'name': 'O',
                'mass': 15.9994,
                'weights': [1],
                'symbols': ['O']
            },
        ]
        kind_query = orm.QueryBuilder().append(orm.StructureData,
                                               project=['attributes.kinds'])
        for kinds in kind_query.all(flat=True):
            assert len(kinds) == len(known_kinds)
            for kind in kinds:
                assert kind in known_kinds

        # Check that there is a StructureData that is an input of a CalculationNode
        builder = orm.QueryBuilder()
        builder.append(orm.StructureData, tag='structure')
        builder.append(orm.CalculationNode, with_incoming='structure')
        assert len(builder.all()) > 0

        # Check that there is a RemoteData that is the output of a CalculationNode
        builder = orm.QueryBuilder()
        builder.append(orm.CalculationNode, tag='parent')
        builder.append(orm.RemoteData, with_incoming='parent')
        assert len(builder.all()) > 0