Example #1
def migrate_archive(input_file, output_file, silent=True):
    """Migrate contents using `migrate_recursively`
    This is essentially similar to `verdi export migrate`.
    However, since this command may be disabled, this function simulates it and keeps the tests working.

    :param input_file: filename with full path for archive to be migrated
    :param output_file: filename with full path for archive to be created after migration
    """
    import os
    import tarfile
    import zipfile

    from aiida.common import json
    from aiida.common.exceptions import NotExistent
    from aiida.common.folders import SandboxFolder
    from aiida.tools.importexport import extract_tar, extract_zip
    from aiida.tools.importexport.migration import migrate_recursively

    # Unpack archive, migrate, and re-pack archive
    with SandboxFolder(sandbox_in_repo=False) as folder:
        if zipfile.is_zipfile(input_file):
            extract_zip(input_file, folder, silent=silent)
        elif tarfile.is_tarfile(input_file):
            extract_tar(input_file, folder, silent=silent)
        else:
            raise ValueError(
                'invalid file format, expected either a zip archive or gzipped tarball'
            )

        try:
            with open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                data = json.load(fhandle)
            with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle:
                metadata = json.load(fhandle)
        except IOError as exception:
            # `exception.filename` holds the path of the missing json file
            raise NotExistent('export archive does not contain the required file {}'.format(exception.filename))

        # Migrate
        migrate_recursively(metadata, data, folder)

        # Write json files
        with open(folder.get_abs_path('data.json'), 'wb') as fhandle:
            json.dump(data, fhandle, indent=4)

        with open(folder.get_abs_path('metadata.json'), 'wb') as fhandle:
            json.dump(metadata, fhandle, indent=4)

        # Pack archive
        compression = zipfile.ZIP_DEFLATED
        with zipfile.ZipFile(output_file,
                             mode='w',
                             compression=compression,
                             allowZip64=True) as archive:
            src = folder.abspath
            for dirpath, dirnames, filenames in os.walk(src):
                relpath = os.path.relpath(dirpath, src)
                for filename in dirnames + filenames:
                    real_src = os.path.join(dirpath, filename)
                    real_dest = os.path.join(relpath, filename)
                    archive.write(real_src, real_dest)
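
A minimal usage sketch for the helper above; the archive filenames are hypothetical placeholders and any export archive in an older format would do:

# Hypothetical usage: migrate an old archive into a new file.
# 'export_v0.4_simple.aiida' and 'export_migrated.aiida' are placeholder paths.
migrate_archive('export_v0.4_simple.aiida', 'export_migrated.aiida')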
Example #2
    def test_wrong_versions(self):
        """Test correct errors are raised if export files have wrong version numbers"""
        from aiida.tools.importexport.migration import MIGRATE_FUNCTIONS

        wrong_versions = ['0.0', '0.1.0', '0.99']
        old_versions = list(MIGRATE_FUNCTIONS.keys())
        legal_versions = old_versions + [newest_version]
        wrong_version_metadatas = []
        for version in wrong_versions:
            metadata = {'export_version': version}
            wrong_version_metadatas.append(metadata)

        # Make sure the "wrong_versions" are wrong
        for version in wrong_versions:
            self.assertNotIn(
                version,
                legal_versions,
                msg="'{}' was not expected to be a legal version, legal versions: {}".format(version, legal_versions)
            )

        # Make sure migrate_recursively raises an ArchiveMigrationError for each wrong version.
        # The call is expected to raise, so there is no return value to inspect.
        for metadata in wrong_version_metadatas:
            with self.assertRaises(ArchiveMigrationError):
                migrate_recursively(metadata, {}, None)
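
For reference, the exception exercised by this test can also be caught explicitly. A short sketch, assuming the newer behaviour tested here (the import paths are the ones used in the other examples and '0.0' is a deliberately unsupported version):

from aiida.tools.importexport import ArchiveMigrationError
from aiida.tools.importexport.migration import migrate_recursively

try:
    migrate_recursively({'export_version': '0.0'}, {}, None)
except ArchiveMigrationError as exception:
    print('migration refused: {}'.format(exception))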
Example #3
    def test_migrate_newest_version(self):
        """
        Test critical message and SystemExit is raised, when an export file with the newest export version is migrated
        """
        # Initialization
        metadata = {'export_version': newest_version}

        # Check: migrating an archive that is already at the newest version should exit
        # through `echo.echo_critical`, i.e. print
        # 'Critical: Your export file is already at the newest export version <export_version>'
        # to stderr (captured here) and raise SystemExit without returning a value.
        with self.assertRaises(SystemExit):
            with Capturing(capture_stderr=True):
                migrate_recursively(metadata, {}, None)
Example #4
    def test_migrate_recursively_specific_version(self):
        """Test the `version` argument of the `migrate_recursively` function."""
        filepath_archive = get_archive_file('export_v0.3_simple.aiida', **self.core_archive)

        with Archive(filepath_archive) as archive:

            # Incorrect type for `version` should raise a TypeError
            with self.assertRaises(TypeError):
                migrate_recursively(archive.meta_data, archive.data, None, version=0.2)

            # Backward migrations are not supported
            with self.assertRaises(ArchiveMigrationError):
                migrate_recursively(archive.meta_data, archive.data, None, version='0.2')

            # Migrating to the version the archive is already at should not raise
            migrate_recursively(archive.meta_data, archive.data, None, version='0.3')

            # Forward migration to a specific intermediate version
            migrated_version = '0.5'
            version = migrate_recursively(archive.meta_data, archive.data, None, version=migrated_version)
            self.assertEqual(version, migrated_version)
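
A short sketch of how the outcome of such a targeted migration could be double-checked with `verify_metadata_version`, as used in Examples #5 and #6. The import path is taken from the other examples, and `Archive`/`filepath_archive` are assumed to be the same objects as in the test above:

# Sketch: after a targeted migration the metadata should report the requested version.
from aiida.tools.importexport.migration import migrate_recursively, verify_metadata_version

with Archive(filepath_archive) as archive:
    migrate_recursively(archive.meta_data, archive.data, None, version='0.5')
    # `verify_metadata_version` raises if the metadata does not report the expected version
    verify_metadata_version(archive.meta_data, version='0.5')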
Example #5
def migrate(input_file, output_file, force, silent, archive_format):
    # pylint: disable=too-many-locals,too-many-statements,too-many-branches
    """
    Migrate an old export archive file to the most recent format.
    """
    import io
    import os
    import tarfile
    import zipfile

    from aiida.cmdline.utils import echo
    from aiida.common import json
    from aiida.common.folders import SandboxFolder
    from aiida.tools.importexport import migration, extract_zip, extract_tar

    if os.path.exists(output_file) and not force:
        echo.echo_critical('the output file already exists')

    with SandboxFolder(sandbox_in_repo=False) as folder:

        if zipfile.is_zipfile(input_file):
            extract_zip(input_file, folder, silent=silent)
        elif tarfile.is_tarfile(input_file):
            extract_tar(input_file, folder, silent=silent)
        else:
            echo.echo_critical('invalid file format, expected either a zip archive or gzipped tarball')

        try:
            with io.open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                data = json.load(fhandle)
            with io.open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle:
                metadata = json.load(fhandle)
        except IOError as exception:
            echo.echo_critical('export archive does not contain the required file {}'.format(exception.filename))

        old_version = migration.verify_metadata_version(metadata)
        new_version = migration.migrate_recursively(metadata, data, folder)

        with io.open(folder.get_abs_path('data.json'), 'wb') as fhandle:
            json.dump(data, fhandle, indent=4)

        with io.open(folder.get_abs_path('metadata.json'), 'wb') as fhandle:
            json.dump(metadata, fhandle)

        if archive_format in ['zip', 'zip-uncompressed']:
            compression = zipfile.ZIP_DEFLATED if archive_format == 'zip' else zipfile.ZIP_STORED
            with zipfile.ZipFile(output_file, mode='w', compression=compression, allowZip64=True) as archive:
                src = folder.abspath
                for dirpath, dirnames, filenames in os.walk(src):
                    relpath = os.path.relpath(dirpath, src)
                    for filename in dirnames + filenames:
                        real_src = os.path.join(dirpath, filename)
                        real_dest = os.path.join(relpath, filename)
                        archive.write(real_src, real_dest)
        elif archive_format == 'tar.gz':
            with tarfile.open(output_file, 'w:gz', format=tarfile.PAX_FORMAT, dereference=True) as archive:
                archive.add(folder.abspath, arcname='')

        if not silent:
            echo.echo_success('migrated the archive from version {} to {}'.format(old_version, new_version))
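
Since this function is the body of a `verdi export migrate`-style click command, one way to exercise it is through click's `CliRunner`. The sketch below assumes `migrate` has been decorated as a click command that takes the input and output paths as positional arguments (the decorators are not shown above), and the archive paths are placeholders:

from click.testing import CliRunner

# Hypothetical invocation: 'old.aiida' is an existing archive, 'new.aiida' the target file.
runner = CliRunner()
result = runner.invoke(migrate, ['old.aiida', 'new.aiida'])
assert result.exit_code == 0, result.output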
Example #6
    def test_migrate_recursively(self):
        """Test function 'migrate_recursively'"""
        import io
        import tarfile
        import zipfile

        from aiida.common.exceptions import NotExistent
        from aiida.common.folders import SandboxFolder
        from aiida.common.json import load as jsonload
        from aiida.tools.importexport.common.archive import extract_tar, extract_zip

        # Get metadata.json and data.json as dicts from v0.1 file archive
        # Cannot use 'get_json_files' for 'export_v0.1_simple.aiida',
        # because we need to pass the SandboxFolder to 'migrate_recursively'
        dirpath_archive = get_archive_file('export_v0.1_simple.aiida',
                                           **self.core_archive)

        with SandboxFolder(sandbox_in_repo=False) as folder:
            if zipfile.is_zipfile(dirpath_archive):
                extract_zip(dirpath_archive, folder, silent=True)
            elif tarfile.is_tarfile(dirpath_archive):
                extract_tar(dirpath_archive, folder, silent=True)
            else:
                raise ValueError(
                    'invalid file format, expected either a zip archive or gzipped tarball'
                )

            try:
                with io.open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                    data = jsonload(fhandle)
                with io.open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle:
                    metadata = jsonload(fhandle)
            except IOError as exception:
                raise NotExistent('export archive does not contain the required file {}'.format(exception.filename))

            verify_metadata_version(metadata, version='0.1')

            # Migrate to newest version
            new_version = migrate_recursively(metadata, data, folder)
            verify_metadata_version(metadata, version=newest_version)
            self.assertEqual(new_version, newest_version)
Example #7
    def test_wrong_versions(self):
        """Test correct errors are raised if export files have wrong version numbers"""
        from aiida.tools.importexport.migration import MIGRATE_FUNCTIONS

        # Initialization
        wrong_versions = ['0.0', '0.1.0', '0.99']
        old_versions = list(MIGRATE_FUNCTIONS.keys())
        legal_versions = old_versions + [newest_version]
        wrong_version_metadatas = []
        for version in wrong_versions:
            metadata = {'export_version': version}
            wrong_version_metadatas.append(metadata)

        # Checks
        # Make sure the "wrong_versions" are wrong
        for version in wrong_versions:
            self.assertNotIn(
                version,
                legal_versions,
                msg="'{}' was not expected to be a legal version, legal versions: {}".format(version, legal_versions)
            )

        # Make sure migrate_recursively prints a critical message and raises SystemExit.
        # The message 'Critical: Cannot migrate from version <export_version>' goes to stderr
        # (captured here) and the call exits without returning a value.
        for metadata in wrong_version_metadatas:
            with self.assertRaises(SystemExit):
                with Capturing(capture_stderr=True):
                    migrate_recursively(metadata, {}, None)
Example #8
    def test_migrate_newest_version(self):
        """Test that  migrating the latest version runs without complaints."""
        metadata = {'export_version': newest_version}

        new_version = migrate_recursively(metadata, {}, None)
        self.assertEqual(new_version, newest_version)
Example #9
def migrate(input_file, output_file, force, silent, in_place, archive_format, version):
    # pylint: disable=too-many-locals,too-many-statements,too-many-branches
    """Migrate an export archive to a more recent format version."""
    import os
    import tarfile
    import tempfile
    import zipfile

    from aiida.cmdline.utils import echo
    from aiida.common import json
    from aiida.common.folders import SandboxFolder
    from aiida.tools.importexport import migration, extract_zip, extract_tar, ArchiveMigrationError, EXPORT_VERSION

    if version is None:
        version = EXPORT_VERSION

    if in_place:
        if output_file:
            echo.echo_critical('output file specified together with --in-place flag')
        tempdir = tempfile.TemporaryDirectory()
        output_file = os.path.join(tempdir.name, 'archive.aiida')
    elif not output_file:
        echo.echo_critical(
            'no output file specified. Please add --in-place flag if you would like to migrate in place.'
        )

    if os.path.exists(output_file) and not force:
        echo.echo_critical('the output file already exists')

    with SandboxFolder(sandbox_in_repo=False) as folder:

        if zipfile.is_zipfile(input_file):
            extract_zip(input_file, folder, silent=silent)
        elif tarfile.is_tarfile(input_file):
            extract_tar(input_file, folder, silent=silent)
        else:
            echo.echo_critical('invalid file format, expected either a zip archive or gzipped tarball')

        try:
            with open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                data = json.load(fhandle)
            with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle:
                metadata = json.load(fhandle)
        except IOError as exception:
            echo.echo_critical('export archive does not contain the required file {}'.format(exception.filename))

        old_version = migration.verify_metadata_version(metadata)
        if version <= old_version:
            echo.echo_success('nothing to be done - archive already at version {} >= {}'.format(old_version, version))
            return

        try:
            new_version = migration.migrate_recursively(metadata, data, folder, version)
        except ArchiveMigrationError as exception:
            echo.echo_critical(str(exception))

        with open(folder.get_abs_path('data.json'), 'wb') as fhandle:
            json.dump(data, fhandle, indent=4)

        with open(folder.get_abs_path('metadata.json'), 'wb') as fhandle:
            json.dump(metadata, fhandle)

        if archive_format in ['zip', 'zip-uncompressed']:
            compression = zipfile.ZIP_DEFLATED if archive_format == 'zip' else zipfile.ZIP_STORED
            with zipfile.ZipFile(output_file, mode='w', compression=compression, allowZip64=True) as archive:
                src = folder.abspath
                for dirpath, dirnames, filenames in os.walk(src):
                    relpath = os.path.relpath(dirpath, src)
                    for filename in dirnames + filenames:
                        real_src = os.path.join(dirpath, filename)
                        real_dest = os.path.join(relpath, filename)
                        archive.write(real_src, real_dest)
        elif archive_format == 'tar.gz':
            with tarfile.open(output_file, 'w:gz', format=tarfile.PAX_FORMAT, dereference=True) as archive:
                archive.add(folder.abspath, arcname='')

        if in_place:
            os.rename(output_file, input_file)
            tempdir.cleanup()

        if not silent:
            echo.echo_success('migrated the archive from version {} to {}'.format(old_version, new_version))
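
The same `CliRunner` approach from Example #5 can exercise the in-place path. The `--in-place` flag name is taken from the error messages inside the command itself; the archive path is a placeholder and the command is again assumed to be a decorated click command:

from click.testing import CliRunner

# Hypothetical invocation: migrate 'old.aiida' in place, overwriting it on success.
runner = CliRunner()
result = runner.invoke(migrate, ['--in-place', 'old.aiida'])
assert result.exit_code == 0, result.output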