Example #1
    def test_partial_migrations(self, core_archive, tmp_path):
        """Test migrations from a specific version (0.3) to other versions."""
        filepath_archive = get_archive_file('export_v0.3_simple.aiida',
                                            **core_archive)

        metadata = read_json_files(filepath_archive,
                                   names=['metadata.json'])[0]
        verify_metadata_version(metadata, version='0.3')

        migrator_cls = get_migrator(detect_archive_type(filepath_archive))
        migrator = migrator_cls(filepath_archive)

        with pytest.raises(TypeError, match='version must be a string'):
            migrator.migrate(0.2, tmp_path / 'v02.aiida')

        with pytest.raises(ArchiveMigrationError,
                           match='No migration pathway available'):
            migrator.migrate('0.2', tmp_path / 'v02.aiida')

        # same version migration
        out_path = migrator.migrate('0.3', tmp_path / 'v03.aiida')
        # if no migration is performed, the output path is None
        assert out_path is None

        # newer version migration
        migrator.migrate('0.5', tmp_path / 'v05.aiida')
        assert (tmp_path / 'v05.aiida').exists()

        metadata = read_json_files(tmp_path / 'v05.aiida',
                                   names=['metadata.json'])[0]
        verify_metadata_version(metadata, version='0.5')
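
For reference, the migrator API exercised above can also be driven outside a test; a minimal sketch, assuming the import locations these tests use from aiida-core's `aiida.tools.importexport` package:

from aiida.tools.importexport import detect_archive_type  # assumed import path
from aiida.tools.importexport.archive.migrators import get_migrator

filepath_archive = 'export_v0.3_simple.aiida'  # hypothetical input archive
migrator = get_migrator(detect_archive_type(filepath_archive))(filepath_archive)
# `migrate` returns the output path, or None when the archive is already at
# the requested version
out_path = migrator.migrate('0.5', 'migrated.aiida')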
Example #2
def test_migrations(migration_data, tmp_path):
    """Test each migration method from the `aiida.tools.importexport.archive.migrations` module."""
    version_old, (version_new, migration_method) = migration_data

    filepath_archive_new = get_archive_file(f'export_v{version_new}_simple.aiida', filepath='export/migrate')

    metadata_new = read_json_files(filepath_archive_new, names=['metadata.json'])[0]
    verify_metadata_version(metadata_new, version=version_new)
    data_new = read_json_files(filepath_archive_new, names=['data.json'])[0]

    filepath_archive_old = get_archive_file(f'export_v{version_old}_simple.aiida', filepath='export/migrate')

    out_path = tmp_path / 'out.aiida'
    with zipfile.ZipFile(filepath_archive_old, 'r', allowZip64=True) as handle:
        handle.extractall(out_path)

    folder = CacheFolder(out_path)
    migration_method(folder)

    _, metadata_old = folder.load_json('metadata.json')
    _, data_old = folder.load_json('data.json')

    verify_metadata_version(metadata_old, version=version_new)

    # Remove AiiDA version, since this may change regardless of the migration function
    metadata_old.pop('aiida_version')
    metadata_new.pop('aiida_version')

    # Assert conversion message in `metadata.json` is correct and then remove it for later assertions
    metadata_new.pop('conversion_info')
    message = f'Converted from version {version_old} to {version_new} with AiiDA v{get_version()}'
    assert metadata_old.pop('conversion_info')[-1] == message, 'Conversion message after migration is wrong'

    assert metadata_old == metadata_new
    assert data_old == data_new
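
The `migration_data` fixture is not shown in this example; a plausible sketch, assuming a `MIGRATE_FUNCTIONS` mapping of `{version_old: (version_new, migration_method)}` in `aiida.tools.importexport.archive.migrations`:

import pytest

from aiida.tools.importexport.archive.migrations import MIGRATE_FUNCTIONS  # assumed mapping


@pytest.fixture(params=list(MIGRATE_FUNCTIONS.items()), ids=list(MIGRATE_FUNCTIONS))
def migration_data(request):
    """Yield one `(version_old, (version_new, migration_method))` pair per migration step."""
    return request.param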
Example #3
    def test_full_migration(self, tmp_path, core_archive):
        """Test a migration from the first to newest archive version."""

        filepath_archive = get_archive_file('export_v0.1_simple.aiida',
                                            **core_archive)

        metadata = read_json_files(filepath_archive,
                                   names=['metadata.json'])[0]
        verify_metadata_version(metadata, version='0.1')

        migrator_cls = get_migrator(detect_archive_type(filepath_archive))
        migrator = migrator_cls(filepath_archive)

        migrator.migrate(newest_version, tmp_path / 'out.aiida')
        assert detect_archive_type(tmp_path / 'out.aiida') == 'zip'
        metadata = read_json_files(tmp_path / 'out.aiida',
                                   names=['metadata.json'])[0]
        verify_metadata_version(metadata, version=newest_version)
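
`newest_version` here is presumably the latest archive format version; in aiida-core's test suite it is typically obtained as follows (assumed import path):

from aiida.tools.importexport import EXPORT_VERSION as newest_version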
Example #4
    def test_tar_migration(self, tmp_path, core_archive):
        """Test a migration using a tar compressed in/out file."""

        filepath_archive = get_archive_file('export_v0.2_simple.tar.gz',
                                            **core_archive)

        metadata = read_json_files(filepath_archive,
                                   names=['metadata.json'])[0]
        verify_metadata_version(metadata, version='0.2')

        migrator_cls = get_migrator(detect_archive_type(filepath_archive))
        migrator = migrator_cls(filepath_archive)

        migrator.migrate(newest_version,
                         tmp_path / 'out.aiida',
                         out_compression='tar.gz')
        assert detect_archive_type(tmp_path / 'out.aiida') == 'tar.gz'
        metadata = read_json_files(tmp_path / 'out.aiida',
                                   names=['metadata.json'])[0]
        verify_metadata_version(metadata, version=newest_version)
Example #5
def test_migrate_v5_to_v6_calc_states(core_archive, migrate_from_func):
    """Test the data migration of legacy `JobCalcState` attributes.

    This test has to use a local archive because the current archive from the `aiida-export-migration-tests`
    module does not include a `CalcJobNode` with a legacy `state` attribute.
    """
    # Get metadata.json and data.json as dicts from v0.5 file archive
    archive_path = get_archive_file('export_v0.5_simple.aiida', **core_archive)
    metadata, data = read_json_files(archive_path)  # pylint: disable=unbalanced-tuple-unpacking

    verify_metadata_version(metadata, version='0.5')

    calc_job_node_type = 'process.calculation.calcjob.CalcJobNode.'
    node_data = data['export_data'].get('Node', {})
    node_attributes = data['node_attributes']
    calc_jobs = {}
    for pk, values in node_data.items():
        if values['node_type'] == calc_job_node_type and 'state' in node_attributes.get(pk, {}):
            calc_jobs[pk] = node_attributes[pk]['state']

    # Migrate to v0.6
    metadata, data = migrate_from_func('export_v0.5_simple.aiida', '0.5',
                                       '0.6', migrate_v5_to_v6, core_archive)
    verify_metadata_version(metadata, version='0.6')

    node_attributes = data['node_attributes']

    # The export archive contains a single `CalcJobNode` that had `state=FINISHED`.
    for pk, state in calc_jobs.items():

        attributes = node_attributes[pk]

        mapping = STATE_MAPPING[state]

        if mapping.exit_status is not None:
            assert attributes['exit_status'] == mapping.exit_status

        if mapping.process_state is not None:
            assert attributes['process_state'] == mapping.process_state

        if mapping.process_status is not None:
            assert attributes['process_status'] == mapping.process_status

        assert attributes['process_label'] == 'Legacy JobCalculation'
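
`STATE_MAPPING` translates each legacy `JobCalcState` value into the modern process fields asserted above; a sketch of its likely shape (field names inferred from the attribute accesses in the test, the entry itself is illustrative):

from collections import namedtuple

StateMapping = namedtuple('StateMapping', ['state', 'process_state', 'exit_status', 'process_status'])

STATE_MAPPING = {
    # a legacy FINISHED calculation becomes a finished process with exit status 0
    'FINISHED': StateMapping('finished', 'finished', 0, None),
}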
Example #6
def test_migrate_v5_to_v6_datetime(core_archive, migrate_from_func):
    """Test the data migration of serialized datetime objects.

    Datetime attributes were serialized into strings, by first converting to UTC and then printing with the format
    '%Y-%m-%dT%H:%M:%S.%f'. In the database migration, datetimes were serialized *including* timezone information.
    Here we test that the archive migration correctly reattaches the timezone information. The archive we are using,
    `export_v0.5_simple.aiida`, contains a node with the attribute "scheduler_lastchecktime".
    """
    # Get metadata.json and data.json as dicts from v0.5 file archive
    archive_path = get_archive_file('export_v0.5_simple.aiida', **core_archive)
    metadata, data = read_json_files(archive_path)  # pylint: disable=unbalanced-tuple-unpacking

    verify_metadata_version(metadata, version='0.5')

    for key, values in data['node_attributes'].items():
        if 'scheduler_lastchecktime' not in values:
            continue

        serialized_original = values['scheduler_lastchecktime']
        msg = f'the serialized datetime before migration should not contain a plus: {serialized_original}'
        assert '+' not in serialized_original, msg

        # Migrate to v0.6
        metadata, data = migrate_from_func('export_v0.5_simple.aiida', '0.5',
                                           '0.6', migrate_v5_to_v6,
                                           core_archive)
        verify_metadata_version(metadata, version='0.6')

        serialized_migrated = data['node_attributes'][key]['scheduler_lastchecktime']
        assert serialized_migrated == f'{serialized_original}+00:00'
        break

    else:
        raise RuntimeError(
            'the archive `export_v0.5_simple.aiida` did not contain a node with the attribute '
            '`scheduler_lastchecktime` which is required for this test.')
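
For this attribute the migration under test reduces to reattaching the UTC offset; a minimal sketch of that logic, under the serialization assumptions stated in the docstring (hypothetical helper, not the actual migration code):

def reattach_utc_offset(serialized: str) -> str:
    """Append the UTC offset to a naive ISO-8601 datetime string."""
    return serialized if '+' in serialized else f'{serialized}+00:00'

assert reattach_utc_offset('2018-02-14T12:52:32.713289') == '2018-02-14T12:52:32.713289+00:00'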
Example #7
def test_compare_migration_with_aiida_made(external_archive,
                                           migrate_from_func):
    """
    Compare the migration of a Workflow made and exported with version 0.3 to version 0.4,
    and the same Workflow made and exported with version 0.4.
    (AiiDA versions 0.12.3 versus 1.0.0b2)
    NB: Since PKs and UUIDs will have changed, comparisons between 'data.json'-files will be made indirectly
    """
    # Get metadata.json and data.json as dicts from v0.3 file archive and migrate
    metadata_v3, data_v3 = migrate_from_func('export_v0.3.aiida', '0.3', '0.4',
                                             migrate_v3_to_v4)

    # Get metadata.json and data.json as dicts from v0.4 file archive
    archive_path = get_archive_file('export_v0.4.aiida', **external_archive)
    metadata_v4, data_v4 = read_json_files(archive_path)  # pylint: disable=unbalanced-tuple-unpacking

    # Compare 'metadata.json'
    metadata_v3.pop('conversion_info')
    metadata_v3.pop('aiida_version')
    metadata_v4.pop('aiida_version')
    assert metadata_v3 == metadata_v4

    # Compare 'data.json'
    assert len(data_v3) == len(data_v4)

    entities = {
        'Node': {'migrated': [], 'made': []},
        'Computer': {'migrated': [], 'made': []},
        'Group': {'migrated': [], 'made': []},
    }  # User is special, see below
    for entity, details in entities.items():
        for node in data_v3['export_data'][entity].values():
            add = node.get('node_type', None)  # Node
            if not add:
                add = node.get('hostname', None)  # Computer
            if not add:
                add = node.get('type_string', None)  # Group
            assert add is not None, "Helper variable 'add' should never be None"
            details['migrated'].append(add)
        for node in data_v4['export_data'][entity].values():
            add = node.get('node_type', None)  # Node
            if not add:
                add = node.get('hostname', None)  # Computer
            if not add:
                add = node.get('type_string', None)  # Group
            assert add is not None, "Helper variable 'add' should never be None"
            details['made'].append(add)

        # Two extra Dict nodes are present in the AiiDA-made export v0.4 file
        if entity == 'Node':
            details['migrated'].extend(2 * ['data.dict.Dict.'])

        assert sorted(details['migrated']) == sorted(details['made']), (
            f'The {entity} entities differ between the migrated and the AiiDA-made archive, see diff for details')

    fields = {
        'groups_uuid', 'node_attributes_conversion', 'node_attributes',
        'node_extras', 'node_extras_conversion'
    }  # 'export_data' is special, see below
    for field in fields:
        if field != 'groups_uuid':
            correction = 2  # Two extra Dicts in AiiDA made export v0.4 file
        else:
            correction = 0

        assert len(data_v3[field]) == len(data_v4[field]) - correction, (
            f'Number of entities in {field} differs for the archive files')

    number_of_links_v3 = {
        'unspecified': 0,
        'create': 0,
        'return': 0,
        'input_calc': 0,
        'input_work': 0,
        'call_calc': 0,
        'call_work': 0
    }
    for link in data_v3['links_uuid']:
        number_of_links_v3[link['type']] += 1

    number_of_links_v4 = {
        'unspecified': 0,
        'create': 0,
        'return': 0,
        'input_calc': -2,  # Two extra Dict inputs to CalcJobNodes
        'input_work': 0,
        'call_calc': 0,
        'call_work': 0
    }
    for link in data_v4['links_uuid']:
        number_of_links_v4[link['type']] += 1

    assert number_of_links_v3 == number_of_links_v4, (
        'There are a different number of specific links in the migrated archive file than the AiiDA made one.'
    )

    assert number_of_links_v3['unspecified'] == 0
    assert number_of_links_v4['unspecified'] == 0

    # Special for data['export_data']['User']
    # There is an extra user in the migrated export v0.3 file
    assert len(data_v3['export_data']['User']) == len(data_v4['export_data']['User']) + 1

    # Special for data['export_data']
    # There are Logs exported in the AiiDA made export v0.4 file
    assert len(data_v3['export_data']) + 1 == len(data_v4['export_data'])
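
The `migrate_from_func` fixture shared by this and the previous examples is not shown; a plausible sketch, in which the name and call signature follow the usages above while the body and the default archive location are assumptions (`get_archive_file`, `CacheFolder` and `verify_metadata_version` are the same helpers used throughout these examples):

import zipfile

import pytest


@pytest.fixture
def migrate_from_func(tmp_path):
    """Unpack an archive, apply a single migration method and return the migrated JSON contents."""

    def _migrate(filename_archive, version_old, version_new, migration_method, archive_kwargs=None):
        archive_kwargs = archive_kwargs or {'filepath': 'export/migrate'}  # assumed default
        filepath_archive = get_archive_file(filename_archive, **archive_kwargs)

        out_path = tmp_path / 'migrated'
        with zipfile.ZipFile(filepath_archive, 'r', allowZip64=True) as handle:
            handle.extractall(out_path)

        folder = CacheFolder(out_path)
        _, metadata = folder.load_json('metadata.json')
        verify_metadata_version(metadata, version=version_old)

        migration_method(folder)

        _, metadata = folder.load_json('metadata.json')
        verify_metadata_version(metadata, version=version_new)
        _, data = folder.load_json('data.json')
        return metadata, data

    return _migrate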
Example #8
    def test_migrate_to_newest(self, external_archive, tmp_path, filename,
                               nodes):
        """Test migrations from old archives to newest version."""
        filepath_archive = get_archive_file(filename, **external_archive)

        out_path = tmp_path / 'out.aiida'

        migrator_cls = get_migrator(detect_archive_type(filepath_archive))
        migrator = migrator_cls(filepath_archive)
        # `migrate` returns None when the archive is already at the newest
        # version, in which case we fall back to the original file
        out_path = migrator.migrate(newest_version, out_path) or filepath_archive

        metadata = read_json_files(out_path, names=['metadata.json'])[0]
        verify_metadata_version(metadata, version=newest_version)

        # Load the migrated file
        import_data(out_path)

        # count nodes
        archive_node_count = orm.QueryBuilder().append(orm.Node).count()
        assert archive_node_count == nodes

        # Verify that CalculationNodes have non-empty attribute dictionaries
        calc_query = orm.QueryBuilder().append(orm.CalculationNode)
        for [calculation] in calc_query.iterall():
            assert isinstance(calculation.attributes, dict)
            assert len(calculation.attributes) > 0

        # Verify that the StructureData nodes retained their label, cell, and kinds
        struct_query = orm.QueryBuilder().append(orm.StructureData)
        assert struct_query.count() == 2
        for structure in struct_query.all(flat=True):
            assert structure.label == ''
            assert structure.cell == [[4, 0, 0], [0, 4, 0], [0, 0, 4]]

        known_kinds = [
            {
                'name': 'Ba',
                'mass': 137.327,
                'weights': [1],
                'symbols': ['Ba']
            },
            {
                'name': 'Ti',
                'mass': 47.867,
                'weights': [1],
                'symbols': ['Ti']
            },
            {
                'name': 'O',
                'mass': 15.9994,
                'weights': [1],
                'symbols': ['O']
            },
        ]
        kind_query = orm.QueryBuilder().append(orm.StructureData,
                                               project=['attributes.kinds'])
        for kinds in kind_query.all(flat=True):
            assert len(kinds) == len(known_kinds)
            for kind in kinds:
                assert kind in known_kinds

        # Check that there is a StructureData that is an input of a CalculationNode
        builder = orm.QueryBuilder()
        builder.append(orm.StructureData, tag='structure')
        builder.append(orm.CalculationNode, with_incoming='structure')
        assert len(builder.all()) > 0

        # Check that there is a RemoteData that is the output of a CalculationNode
        builder = orm.QueryBuilder()
        builder.append(orm.CalculationNode, tag='parent')
        builder.append(orm.RemoteData, with_incoming='parent')
        assert len(builder.all()) > 0
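
`filename` and `nodes` are evidently supplied by `pytest.mark.parametrize`; a hypothetical sketch of the decorator, where the archive names and node counts are purely illustrative:

@pytest.mark.parametrize('filename,nodes', (
    ('export_v0.1_simple.aiida', 10),  # illustrative pair
    ('export_v0.2_simple.aiida', 10),  # illustrative pair
))
def test_migrate_to_newest(self, external_archive, tmp_path, filename, nodes):
    ...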
Example #9
def test_compare_migration_with_aiida_made(migrate_from_func,
                                           external_archive):
    """
    Compare the migration of a Workflow made and exported with version 0.2 to version 0.3,
    and the same Workflow made and exported with version 0.3.
    (AiiDA versions 0.9.1 versus 0.12.3)
    NB: Since PKs and UUIDs will have changed, comparisons between 'data.json'-files will be made indirectly
    """

    # Get metadata.json and data.json as dicts from v0.2 file archive and migrate
    metadata_v2, data_v2 = migrate_from_func('export_v0.2.aiida', '0.2', '0.3',
                                             migrate_v2_to_v3)

    # Get metadata.json and data.json as dicts from v0.3 file archive
    archive_path = get_archive_file('export_v0.3.aiida', **external_archive)
    metadata_v3, data_v3 = read_json_files(archive_path)  # pylint: disable=unbalanced-tuple-unpacking

    # Compare 'metadata.json'
    metadata_v2.pop('conversion_info')
    metadata_v2.pop('aiida_version')
    metadata_v3.pop('aiida_version')
    assert metadata_v2 == metadata_v3

    # Compare 'data.json'
    assert len(data_v2) == len(data_v3)

    entities = {
        'Node': {'migrated': [], 'made': []},
        'Computer': {'migrated': [], 'made': []},
        'Group': {'migrated': [], 'made': []},
    }  # User is special, see below
    for entity, details in entities.items():
        for node in data_v2['export_data'][entity].values():
            # Reset `add` for every node: a stale truthy value from a previous
            # iteration would otherwise mask the Computer/Group lookups below
            add = node.get('type') if entity == 'Node' else None
            if not add:
                add = node.get('hostname', None)  # Computer
            if not add:
                add = node.get('name', None)  # Group
            assert add is not None, "Helper variable 'add' should never be None"
            details['migrated'].append(add)
        for node in data_v3['export_data'][entity].values():
            add = node.get('type') if entity == 'Node' else None

            # Special case - BandsData did not exist for AiiDA v0.9.1
            if add and add.endswith('BandsData.'):
                add = 'data.array.kpoints.KpointsData.'

            if not add:
                add = node.get('hostname', None)  # Computer
            if not add:
                add = node.get('name', None)  # Group
            assert add is not None, "Helper variable 'add' should never be None"
            details['made'].append(add)

        assert sorted(details['migrated']) == sorted(details['made']), (
            f'The {entity} entities differ between the migrated and the AiiDA-made archive, see diff for details')

    fields = {
        'export_data', 'groups_uuid', 'node_attributes_conversion',
        'node_attributes'
    }
    for field in fields:
        assert len(data_v2[field]) == len(data_v3[field]), (
            f'Number of entities in {field} differs for the archive files')

    number_of_links_v2 = {
        'unspecified': 0,
        'createlink': 2,  # there are two extra create-links in the AiiDA-made export v0.3 file
        'returnlink': 0,
        'inputlink': 0,
        'calllink': 0
    }
    for link in data_v2['links_uuid']:
        number_of_links_v2[link['type']] += 1

    number_of_links_v3 = {
        'unspecified': 0,
        'createlink': 0,
        'returnlink': 0,
        'inputlink': 0,
        'calllink': 0
    }
    for link in data_v3['links_uuid']:
        number_of_links_v3[link['type']] += 1

    assert number_of_links_v2 == number_of_links_v3, (
        'There are a different number of specific links in the migrated archive file than the AiiDA made one.'
    )

    assert number_of_links_v2['unspecified'] == 0
    assert number_of_links_v3['unspecified'] == 0

    # Special for data['export_data']['User']
    # There is an extra user in the AiiDA made archive file
    assert len(data_v2['export_data']['User']) + 1 == len(data_v3['export_data']['User'])