Esempio n. 1
0
    def test_v02_to_newest(self, temp_dir):
        """Test migration of exported files from v0.2 to newest export version"""
        # Locate the v0.2 archive and pick a destination for the migrated copy
        input_file = get_archive_file('export_v0.2.aiida',
                                      **self.external_archive)
        output_file = os.path.join(temp_dir, 'output_file.aiida')

        # Migrate, then confirm the resulting metadata carries the newest version
        migrate_archive(input_file, output_file)
        metadata, _ = get_json_files(output_file)
        verify_metadata_version(metadata, version=newest_version)

        # Import the migrated archive into the database
        import_data(output_file, silent=True)

        # The total number of nodes must match the expected count for this archive
        self.assertEqual(orm.QueryBuilder().append(orm.Node).count(),
                         self.node_count)

        # Every CalculationNode should carry a non-empty attribute dictionary
        for calculation, in orm.QueryBuilder().append(orm.CalculationNode).iterall():
            self.assertIsInstance(calculation.attributes, dict)
            self.assertNotEqual(len(calculation.attributes), 0)

        # StructureData nodes must have kept their (identical) label, cell, and kinds
        builder = orm.QueryBuilder().append(orm.StructureData)
        self.assertEqual(
            builder.count(),
            self.struct_count,
            msg='There should be {} StructureData, instead {} were/was found'.
            format(self.struct_count, builder.count()))
        for structure, in builder.all():
            self.assertEqual(structure.label, self.known_struct_label)
            self.assertEqual(structure.cell, self.known_cell)

        kinds_builder = orm.QueryBuilder().append(orm.StructureData,
                                                  project=['attributes.kinds'])
        for kinds, in kinds_builder.iterall():
            self.assertEqual(len(kinds), len(self.known_kinds))
            for kind in kinds:
                self.assertIn(kind,
                              self.known_kinds,
                              msg="Kind '{}' not found in: {}".format(
                                  kind, self.known_kinds))

        # At least one StructureData must be the input of a CalculationNode
        builder = orm.QueryBuilder()
        builder.append(orm.StructureData, tag='structure')
        builder.append(orm.CalculationNode, with_incoming='structure')
        self.assertGreater(len(builder.all()), 0)

        # At least one RemoteData must be the output of a CalculationNode
        builder = orm.QueryBuilder()
        builder.append(orm.CalculationNode, tag='parent')
        builder.append(orm.RemoteData, with_incoming='parent')
        self.assertGreater(len(builder.all()), 0)
Esempio n. 2
0
    def test_migrate_v3_to_v4(self):
        """Test function migrate_v3_to_v4

        Migrates a v0.3 archive in place and asserts the resulting metadata.json
        and data.json equal those of a reference v0.4 archive.
        """
        from aiida import get_version

        # Get metadata.json and data.json as dicts from the reference v0.4 file archive
        metadata_v4, data_v4 = get_json_files('export_v0.4_simple.aiida', **self.core_archive)
        verify_metadata_version(metadata_v4, version='0.4')

        # Get metadata.json and data.json as dicts from v0.3 file archive
        # Cannot use 'get_json_files' for 'export_v0.3_simple.aiida',
        # because we need to pass the SandboxFolder to 'migrate_v3_to_v4'
        dirpath_archive = get_archive_file('export_v0.3_simple.aiida', **self.core_archive)

        with SandboxFolder(sandbox_in_repo=False) as folder:
            # Extract into the sandbox; both supported archive formats are accepted
            if zipfile.is_zipfile(dirpath_archive):
                extract_zip(dirpath_archive, folder, silent=True)
            elif tarfile.is_tarfile(dirpath_archive):
                extract_tar(dirpath_archive, folder, silent=True)
            else:
                raise ValueError('invalid file format, expected either a zip archive or gzipped tarball')

            # Read the two JSON files the migration operates on
            try:
                with open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                    data_v3 = jsonload(fhandle)
                with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle:
                    metadata_v3 = jsonload(fhandle)
            except IOError:
                raise NotExistent('export archive does not contain the required file {}'.format(fhandle.filename))

            verify_metadata_version(metadata_v3, version='0.3')

            # Migrate to v0.4 (mutates metadata_v3 and data_v3 in place)
            migrate_v3_to_v4(metadata_v3, data_v3, folder)
            verify_metadata_version(metadata_v3, version='0.4')

        # Remove AiiDA version, since this may change regardless of the migration function
        metadata_v3.pop('aiida_version')
        metadata_v4.pop('aiida_version')

        # Assert conversion message in `metadata.json` is correct and then remove it for later assertions
        self.maxDiff = None  # pylint: disable=invalid-name
        conversion_message = 'Converted from version 0.3 to 0.4 with AiiDA v{}'.format(get_version())
        self.assertEqual(
            metadata_v3.pop('conversion_info')[-1],
            conversion_message,
            msg='The conversion message after migration is wrong'
        )
        metadata_v4.pop('conversion_info')

        # Assert changes were performed correctly
        self.assertDictEqual(
            metadata_v3,
            metadata_v4,
            msg='After migration, metadata.json should equal intended metadata.json from archives'
        )
        self.assertDictEqual(
            data_v3, data_v4, msg='After migration, data.json should equal intended data.json from archives'
        )
Esempio n. 3
0
    def test_migrate_v7_to_v8(self):
        """Test migration for file containing complete v0.7 era possibilities"""
        from aiida import get_version

        # Load the v0.7 archive to be migrated and check its starting version
        metadata_v7, data_v7 = get_json_files('export_v0.7_simple.aiida', **self.core_archive)
        verify_metadata_version(metadata_v7, version='0.7')

        # Load the reference v0.8 archive for comparison
        metadata_v8, data_v8 = get_json_files('export_v0.8_simple.aiida', **self.core_archive)
        verify_metadata_version(metadata_v8, version='0.8')

        # Migrate in place and confirm the version bump
        migrate_v7_to_v8(metadata_v7, data_v7)
        verify_metadata_version(metadata_v7, version='0.8')

        # The AiiDA version may differ regardless of the migration, so drop it
        metadata_v7.pop('aiida_version')
        metadata_v8.pop('aiida_version')

        # Verify the conversion message, then remove it before comparing the dicts
        self.maxDiff = None  # pylint: disable=invalid-name
        conversion_message = 'Converted from version 0.7 to 0.8 with AiiDA v{}'.format(
            get_version())
        self.assertEqual(metadata_v7.pop('conversion_info')[-1],
                         conversion_message,
                         msg='The conversion message after migration is wrong')
        metadata_v8.pop('conversion_info')

        # The migrated archive should now match the reference archive exactly
        self.assertDictEqual(
            metadata_v7,
            metadata_v8,
            msg='After migration, metadata.json should equal intended metadata.json from archives')
        self.assertDictEqual(
            data_v7,
            data_v8,
            msg='After migration, data.json should equal intended data.json from archives')
Esempio n. 4
0
    def test_migrate_v2_to_v3(self):
        """Test function migrate_v2_to_v3"""
        from aiida import get_version

        # Load the v0.2 archive to be migrated and check its starting version
        metadata_v2, data_v2 = get_json_files('export_v0.2_simple.aiida', **self.core_archive)
        verify_metadata_version(metadata_v2, version='0.2')

        # Load the reference v0.3 archive for comparison
        metadata_v3, data_v3 = get_json_files('export_v0.3_simple.aiida', **self.core_archive)
        verify_metadata_version(metadata_v3, version='0.3')

        # Migrate in place and confirm the version bump
        migrate_v2_to_v3(metadata_v2, data_v2)
        verify_metadata_version(metadata_v2, version='0.3')

        # The AiiDA version may differ regardless of the migration, so drop it
        metadata_v2.pop('aiida_version')
        metadata_v3.pop('aiida_version')

        # Verify the conversion message, then remove it before comparing the dicts
        conversion_message = 'Converted from version 0.2 to 0.3 with AiiDA v{}'.format(
            get_version())
        self.assertEqual(metadata_v2.pop('conversion_info')[-1],
                         conversion_message,
                         msg='The conversion message after migration is wrong')
        metadata_v3.pop('conversion_info')

        # The migrated archive should now match the reference archive exactly
        self.maxDiff = None  # pylint: disable=invalid-name
        self.assertDictEqual(
            metadata_v2,
            metadata_v3,
            msg='After migration, metadata.json should equal intended metadata.json from archives')
        self.assertDictEqual(
            data_v2,
            data_v3,
            msg='After migration, data.json should equal intended data.json from archives')
Esempio n. 5
0
    def test_migrate_v2_to_v3_complete(self):
        """Test migration for file containing complete v0.2 era possibilities"""

        # Load the complete v0.2 archive and check its starting version
        metadata, data = get_json_files('export_v0.2.aiida', **self.external_archive)
        verify_metadata_version(metadata, version='0.2')

        # Migrate in place and confirm the version bump
        migrate_v2_to_v3(metadata, data)
        verify_metadata_version(metadata, version='0.3')

        self.maxDiff = None  # pylint: disable=invalid-name
        # Every link must now carry a 'type' key holding one of the legal v0.3 link types
        legal_link_types = {'unspecified', 'createlink', 'returnlink', 'inputlink', 'calllink'}
        for link in data['links_uuid']:
            self.assertIn(
                'type', link, msg="key 'type' was not added to link: {}".format(link))
            self.assertIn(link['type'], legal_link_types)

        # Entity names in metadata.json must have been renamed to their v0.3 counterparts
        legal_entity_names = {'Node', 'Link', 'Group', 'Computer', 'User', 'Attribute'}
        for field in {'unique_identifiers', 'all_fields_info'}:
            for entity, prop in metadata[field].items():
                self.assertIn(
                    entity,
                    legal_entity_names,
                    msg="'{}' should now be equal to anyone of these: {}, but is not".format(
                        entity, legal_entity_names))

                if field == 'all_fields_info':
                    # 'requires' references must point at legal entity names as well
                    for value in prop.values():
                        if 'requires' in value:
                            self.assertIn(
                                value['requires'],
                                legal_entity_names,
                                msg="'{}' should now be equal to anyone of these: {}, but is not".format(
                                    value, legal_entity_names))

        # The same must hold for the entity names used as keys in data.json
        for entity in data['export_data']:
            self.assertIn(
                entity,
                legal_entity_names,
                msg="'{}' should now be equal to anyone of these: {}, but is not".format(
                    entity, legal_entity_names))
Esempio n. 6
0
    def migrate(self, filename_archive, version_old, version_new, migration_method):
        """Migrate one of the archives from `aiida-export-migration-tests`.

        :param filename_archive: the relative file name of the archive
        :param version_old: version of the archive
        :param version_new: version to migrate to
        :param migration_method: the migration method that should convert between version_old and version_new
        :return: the migrated metadata and data as a tuple
        """
        # Load metadata.json and data.json as dicts and check the starting version
        metadata, data = get_json_files(filename_archive, **self.external_archive)
        verify_metadata_version(metadata, version=version_old)

        # The migration method mutates `metadata` and `data` in place; verify the bump
        migration_method(metadata, data)
        verify_metadata_version(metadata, version=version_new)

        return metadata, data
Esempio n. 7
0
    def test_migrate_v6_to_v7_complete(self):
        """Test migration for file containing complete v0.6 era possibilities"""
        # Load the complete v0.6 archive and check its starting version
        metadata, data = get_json_files('export_v0.6_manual.aiida', **self.external_archive)
        verify_metadata_version(metadata, version='0.6')

        # Migrate in place and confirm the version bump
        migrate_v6_to_v7(metadata, data)
        verify_metadata_version(metadata, version='0.7')

        self.maxDiff = None  # pylint: disable=invalid-name
        # For every process.* node: the legacy attributes must be gone and the new
        # 'sealed' attribute must be present with the expected value
        illegal_attrs = {'_sealed', '_finished', '_failed', '_aborted', '_do_abort'}
        new_attrs = {'sealed': True}
        for node_pk, attrs in data['node_attributes'].items():
            if not data['export_data']['Node'][node_pk]['node_type'].startswith('process.'):
                continue

            # Illegal attributes should have been removed
            for attr in illegal_attrs:
                self.assertNotIn(
                    attr,
                    attrs,
                    msg="key '{}' should have been removed from attributes for Node <pk={}>".format(attr, node_pk))

            # New attributes should have been added with the correct values
            for attr, expected in new_attrs.items():
                self.assertIn(
                    attr, attrs, msg="key '{}' was not added to attributes for Node <pk={}>".format(attr, node_pk))
                self.assertEqual(
                    attrs[attr],
                    expected,
                    msg="key '{}' should have had the value {}, but did instead have {}".format(
                        attr, expected, attrs[attr]))

        # The Attribute and Link entities must have been dropped from metadata.json
        illegal_entities = {'Attribute', 'Link'}
        for dict_ in {'unique_identifiers', 'all_fields_info'}:
            for entity in illegal_entities:
                self.assertNotIn(
                    entity,
                    metadata[dict_],
                    msg="key '{}' should have been removed from '{}' in metadata.json".format(entity, dict_))
Esempio n. 8
0
    def test_migrate_v5_to_v6_calc_states(self):
        """Test the data migration of legacy `JobCalcState` attributes.

        This test has to use a local archive because the current archive from the `aiida-export-migration-tests`
        module does not include a `CalcJobNode` with a legacy `state` attribute.
        """
        # Get metadata.json and data.json as dicts from v0.5 file archive
        metadata, data = get_json_files('export_v0.5_simple.aiida',
                                        **self.core_archive)
        verify_metadata_version(metadata, version='0.5')

        # Collect the legacy `state` attribute of every `CalcJobNode` before migrating.
        # Use the `node_attributes` alias consistently instead of re-indexing `data`.
        calc_job_node_type = 'process.calculation.calcjob.CalcJobNode.'
        node_data = data['export_data'].get('Node', {})
        node_attributes = data['node_attributes']
        calc_jobs = {}
        for pk, values in node_data.items():
            if values['node_type'] == calc_job_node_type and 'state' in node_attributes.get(pk, {}):
                calc_jobs[pk] = node_attributes[pk]['state']

        # Migrate to v0.6
        migrate_v5_to_v6(metadata, data)
        verify_metadata_version(metadata, version='0.6')

        # Re-fetch the attributes dict, since the migration may have replaced it
        node_attributes = data['node_attributes']

        # The export archive contains a single `CalcJobNode` that had `state=FINISHED`.
        for pk, state in calc_jobs.items():

            attributes = node_attributes[pk]

            # Each legacy state maps to (exit_status, process_state, process_status);
            # only the entries that are not None should appear after the migration
            if STATE_MAPPING[state].exit_status is not None:
                self.assertEqual(attributes['exit_status'],
                                 STATE_MAPPING[state].exit_status)

            if STATE_MAPPING[state].process_state is not None:
                self.assertEqual(attributes['process_state'],
                                 STATE_MAPPING[state].process_state)

            if STATE_MAPPING[state].process_status is not None:
                self.assertEqual(attributes['process_status'],
                                 STATE_MAPPING[state].process_status)

            self.assertEqual(attributes['process_label'],
                             'Legacy JobCalculation')
Esempio n. 9
0
    def test_migrate_v7_to_v8_complete(self):
        """Test migration for file containing complete v0.7 era possibilities"""
        # Load the complete v0.7 archive and check its starting version
        metadata, data = get_json_files('export_v0.7_manual.aiida', **self.external_archive)
        verify_metadata_version(metadata, version='0.7')

        # Migrate in place and confirm the version bump
        migrate_v7_to_v8(metadata, data)
        verify_metadata_version(metadata, version='0.8')

        self.maxDiff = None  # pylint: disable=invalid-name
        # No link may still carry the legacy '_return' label after migration
        illegal_label = '_return'
        for link in data.get('links_uuid'):
            label = link['label']
            self.assertFalse(
                label == illegal_label,
                msg='The illegal link label {} was not expected to be present - '
                "it should now be 'result'".format(illegal_label))
Esempio n. 10
0
    def test_migrate_v4_to_v5_complete(self):
        """Test migration for file containing complete v0.4 era possibilities"""

        # Load the complete v0.4 archive and check its starting version
        metadata, data = get_json_files('export_v0.4.aiida', **self.external_archive)
        verify_metadata_version(metadata, version='0.4')

        # Migrate in place and confirm the version bump
        migrate_v4_to_v5(metadata, data)
        verify_metadata_version(metadata, version='0.5')

        self.maxDiff = None  # pylint: disable=invalid-name
        # Schema changes: these fields must be gone from both data.json and metadata.json
        removed_computer_attrs = {'transport_params'}
        removed_node_attrs = {'nodeversion', 'public'}
        for change in removed_computer_attrs:
            # data.json
            for computer in data['export_data']['Computer'].values():
                self.assertNotIn(
                    change,
                    computer,
                    msg="'{}' unexpectedly found for {}".format(change, computer))
            # metadata.json
            self.assertNotIn(
                change,
                metadata['all_fields_info']['Computer'],
                msg="'{}' unexpectedly found in metadata.json for Computer".format(change))
        for change in removed_node_attrs:
            # data.json
            for node in data['export_data']['Node'].values():
                self.assertNotIn(
                    change,
                    node,
                    msg="'{}' unexpectedly found for {}".format(change, node))
            # metadata.json
            self.assertNotIn(
                change,
                metadata['all_fields_info']['Node'],
                msg="'{}' unexpectedly found in metadata.json for Node".format(change))
Esempio n. 11
0
def migration_data(request):
    """For a given tuple of two subsequent versions and corresponding migration method, return metadata and data.

    ``request.param`` is a ``(version_old, version_new, migration_method)`` tuple;
    yields ``(version_old, version_new, metadata_old, metadata_new, data_old, data_new)``
    where the ``*_old`` dicts have already been migrated to ``version_new``.
    """
    version_old, version_new, migration_method = request.param

    # Reference archive that is already at the new version
    filepath_archive = 'export_v{}_simple.aiida'.format(version_new)
    metadata_new, data_new = get_json_files(filepath_archive,
                                            filepath='export/migrate')
    verify_metadata_version(metadata_new, version=version_new)

    # Archive at the old version, opened through `Archive` so the migration
    # method can also receive the extracted folder
    filepath_archive = get_archive_file(
        'export_v{}_simple.aiida'.format(version_old),
        filepath='export/migrate')

    with Archive(filepath_archive) as archive:
        # Deep-copy so the in-place migration does not mutate the archive's own dicts
        metadata_old = copy.deepcopy(archive.meta_data)
        data_old = copy.deepcopy(archive.data)

        migration_method(metadata_old, data_old, archive.folder)
        verify_metadata_version(metadata_old, version=version_new)

    yield version_old, version_new, metadata_old, metadata_new, data_old, data_new
Esempio n. 12
0
    def test_migrate_v5_to_v6_complete(self):
        """Test migration for file containing complete v0.5 era possibilities"""
        # Load the complete v0.5 archive and check its starting version
        metadata, data = get_json_files('export_v0.5_manual.aiida', **self.external_archive)
        verify_metadata_version(metadata, version='0.5')

        # Migrate in place and confirm the version bump
        migrate_v5_to_v6(metadata, data)
        verify_metadata_version(metadata, version='0.6')

        self.maxDiff = None  # pylint: disable=invalid-name
        # The conversion dictionaries must have been dropped from data.json
        for dict_ in ('node_attributes_conversion', 'node_extras_conversion'):
            self.assertNotIn(
                dict_,
                data,
                msg="dictionary '{}' should have been removed from data.json".format(dict_))
Esempio n. 13
0
    def test_migrate_v5_to_v6_datetime(self):
        """Test the data migration of serialized datetime objects.

        Datetime attributes were serialized into strings, by first converting to UTC and then printing with the format
        '%Y-%m-%dT%H:%M:%S.%f'. In the database migration, datetimes were serialized *including* timezone information.
        Here we test that the archive migration correctly reattaches the timezone information. The archive that we are
        using `export_v0.5_simple.aiida` contains a node with the attribute "scheduler_lastchecktime".
        """
        # Get metadata.json and data.json as dicts from v0.5 file archive
        metadata, data = get_json_files('export_v0.5_simple.aiida',
                                        **self.core_archive)
        verify_metadata_version(metadata, version='0.5')

        # Find the first node carrying a 'scheduler_lastchecktime' attribute; the
        # loop's `else` clause below only fires if no `break` ran, i.e. no such node
        for key, values in data['node_attributes'].items():
            if 'scheduler_lastchecktime' not in values:
                continue

            # Before migration the serialized value must not contain a UTC offset
            serialized_original = values['scheduler_lastchecktime']
            msg = 'the serialized datetime before migration should not contain a plus: {}'.format(
                serialized_original)
            self.assertTrue('+' not in serialized_original, msg=msg)

            # Migrate to v0.6 (in place), only once a relevant node is known to exist
            migrate_v5_to_v6(metadata, data)
            verify_metadata_version(metadata, version='0.6')

            # The migration should have appended the '+00:00' UTC offset
            serialized_migrated = data['node_attributes'][key][
                'scheduler_lastchecktime']
            self.assertEqual(serialized_migrated,
                             serialized_original + '+00:00')
            break

        else:
            raise RuntimeError(
                'the archive `export_v0.5_simple.aiida` did not contain a node with the attribute '
                '`scheduler_lastchecktime` which is required for this test.')
Esempio n. 14
0
    def test_compare_migration_with_aiida_made(self):
        """
        Compare the migration of a Workflow made and exported with version 0.3 to version 0.4,
        and the same Workflow made and exported with version 0.4.
        (AiiDA versions 0.12.3 versus 1.0.0b2)
        NB: Since PKs and UUIDs will have changed, comparisons between 'data.json'-files will be made indirectly
        """
        # Get metadata.json and data.json as dicts from v0.3 file archive and migrate
        dirpath_archive = get_archive_file('export_v0.3.aiida',
                                           **self.external_archive)

        # Migrate
        with SandboxFolder(sandbox_in_repo=False) as folder:
            # Extract into the sandbox; both supported archive formats are accepted
            if zipfile.is_zipfile(dirpath_archive):
                extract_zip(dirpath_archive, folder, silent=True)
            elif tarfile.is_tarfile(dirpath_archive):
                extract_tar(dirpath_archive, folder, silent=True)
            else:
                raise ValueError(
                    'invalid file format, expected either a zip archive or gzipped tarball'
                )

            # Read the two JSON files the migration operates on
            try:
                with open(folder.get_abs_path('data.json'),
                          'r',
                          encoding='utf8') as fhandle:
                    data_v3 = jsonload(fhandle)
                with open(folder.get_abs_path('metadata.json'),
                          'r',
                          encoding='utf8') as fhandle:
                    metadata_v3 = jsonload(fhandle)
            except IOError:
                raise NotExistent(
                    'export archive does not contain the required file {}'.
                    format(fhandle.filename))

            # Migrate to v0.4 (mutates metadata_v3 and data_v3 in place)
            migrate_v3_to_v4(metadata_v3, data_v3, folder)

        # Get metadata.json and data.json as dicts from v0.4 file archive
        metadata_v4, data_v4 = get_json_files('export_v0.4.aiida',
                                              **self.external_archive)

        # Compare 'metadata.json' (fields that legitimately differ are dropped first)
        metadata_v3.pop('conversion_info')
        metadata_v3.pop('aiida_version')
        metadata_v4.pop('aiida_version')
        self.assertDictEqual(metadata_v3, metadata_v4)

        # Compare 'data.json'
        self.assertEqual(len(data_v3), len(data_v4))

        # Compare entities indirectly via one identifying field per entity type,
        # since PKs/UUIDs differ between the two exports
        entities = {
            'Node': {
                'migrated': [],
                'made': []
            },
            'Computer': {
                'migrated': [],
                'made': []
            },
            'Group': {
                'migrated': [],
                'made': []
            }
        }  # User is special, see below
        for entity, details in entities.items():
            for node in data_v3['export_data'][entity].values():
                add = node.get('node_type', None)  # Node
                if not add:
                    add = node.get('hostname', None)  # Computer
                if not add:
                    add = node.get('type_string', None)  # Group
                self.assertIsNotNone(
                    add, msg="Helper variable 'add' should never be None")
                details['migrated'].append(add)
            for node in data_v4['export_data'][entity].values():
                add = node.get('node_type', None)  # Node
                if not add:
                    add = node.get('hostname', None)  # Computer
                if not add:
                    add = node.get('type_string', None)  # Group
                self.assertIsNotNone(
                    add, msg="Helper variable 'add' should never be None")
                details['made'].append(add)

            #### Two extra Dicts are present for AiiDA made export 0.4 file ####
            if entity == 'Node':
                details['migrated'].extend(2 * ['data.dict.Dict.'])

            # Sorted lists compare the multiset of identifying values, ignoring order
            self.assertListEqual(
                sorted(details['migrated']),
                sorted(details['made']),
                msg='Number of {}-entities differ, see diff for details'.
                format(entity))

        # Compare entry counts of the remaining top-level fields, correcting for
        # the two extra Dict nodes in the AiiDA made v0.4 export
        fields = {
            'groups_uuid', 'node_attributes_conversion', 'node_attributes',
            'node_extras', 'node_extras_conversion'
        }  # 'export_data' is special, see below
        for field in fields:
            if field != 'groups_uuid':
                correction = 2  # Two extra Dicts in AiiDA made export v0.4 file
            else:
                correction = 0

            self.assertEqual(
                len(data_v3[field]),
                len(data_v4[field]) - correction,
                msg='Number of entities in {} differs for the export files'.
                format(field))

        # Count links per type for both files; the -2 start value compensates for
        # the two extra Dict inputs in the AiiDA made export
        number_of_links_v3 = {
            'unspecified': 0,
            'create': 0,
            'return': 0,
            'input_calc': 0,
            'input_work': 0,
            'call_calc': 0,
            'call_work': 0
        }
        for link in data_v3['links_uuid']:
            number_of_links_v3[link['type']] += 1

        number_of_links_v4 = {
            'unspecified': 0,
            'create': 0,
            'return': 0,
            'input_calc': -2,  # Two extra Dict inputs to CalcJobNodes
            'input_work': 0,
            'call_calc': 0,
            'call_work': 0
        }
        for link in data_v4['links_uuid']:
            number_of_links_v4[link['type']] += 1

        self.assertDictEqual(
            number_of_links_v3,
            number_of_links_v4,
            msg=
            'There are a different number of specific links in the migrated export file than the AiiDA made one.'
        )

        # Neither file should contain any 'unspecified' links
        self.assertEqual(number_of_links_v3['unspecified'], 0)
        self.assertEqual(number_of_links_v4['unspecified'], 0)

        # Special for data['export_data']['User']
        # There is an extra user in the migrated export v0.3 file
        self.assertEqual(len(data_v3['export_data']['User']),
                         len(data_v4['export_data']['User']) + 1)

        # Special for data['export_data']
        # There are Logs exported in the AiiDA made export v0.4 file
        self.assertEqual(
            len(data_v3['export_data']) + 1, len(data_v4['export_data']))
Esempio n. 15
0
    def test_compare_migration_with_aiida_made(self):
        """
        Compare the migration of a Workflow made and exported with version 0.2 to version 0.3,
        and the same Workflow made and exported with version 0.3.
        (AiiDA versions 0.9.1 versus 0.12.3)
        NB: Since PKs and UUIDs will have changed, comparisons between 'data.json'-files will be made indirectly
        """

        # Get metadata.json and data.json as dicts from v0.2 file archive and migrate
        metadata_v2, data_v2 = get_json_files('export_v0.2.aiida',
                                              **self.external_archive)
        migrate_v2_to_v3(metadata_v2, data_v2)

        # Get metadata.json and data.json as dicts from v0.3 file archive
        metadata_v3, data_v3 = get_json_files('export_v0.3.aiida',
                                              **self.external_archive)

        # Compare 'metadata.json' (fields that legitimately differ are dropped first)
        metadata_v2.pop('conversion_info')
        metadata_v2.pop('aiida_version')
        metadata_v3.pop('aiida_version')
        self.assertDictEqual(metadata_v2, metadata_v3)

        self.maxDiff = None
        # Compare 'data.json'
        self.assertEqual(len(data_v2), len(data_v3))

        # Compare entities indirectly via one identifying field per entity type,
        # since PKs/UUIDs differ between the two exports
        entities = {
            'Node': {
                'migrated': [],
                'made': []
            },
            'Computer': {
                'migrated': [],
                'made': []
            },
            'Group': {
                'migrated': [],
                'made': []
            }
        }  # User is special, see below
        for entity, details in entities.items():
            for node in data_v2['export_data'][entity].values():
                # BUGFIX: reset `add` for every row. Previously it was initialized
                # once before the loop, so a truthy value left over from a prior
                # row/entity was appended instead of this row's hostname/name.
                add = node.get('type', None) if entity == 'Node' else None
                if not add:
                    add = node.get('hostname', None)  # Computer
                if not add:
                    add = node.get('name', None)  # Group
                self.assertIsNotNone(
                    add, msg="Helper variable 'add' should never be None")
                details['migrated'].append(add)
            for node in data_v3['export_data'][entity].values():
                add = node.get('type', None) if entity == 'Node' else None

                # Special case - BandsData did not exist for AiiDA v0.9.1
                if add and add.endswith('BandsData.'):
                    add = 'data.array.kpoints.KpointsData.'

                if not add:
                    add = node.get('hostname', None)  # Computer
                if not add:
                    add = node.get('name', None)  # Group
                self.assertIsNotNone(
                    add, msg="Helper variable 'add' should never be None")
                details['made'].append(add)

            # Sorted lists compare the multiset of identifying values, ignoring order
            self.assertListEqual(
                sorted(details['migrated']),
                sorted(details['made']),
                msg='Number of {}-entities differ, see diff for details'.
                format(entity))

        # The remaining top-level fields should contain the same number of entries
        fields = {
            'export_data', 'groups_uuid', 'node_attributes_conversion',
            'node_attributes'
        }
        for field in fields:
            self.assertEqual(
                len(data_v2[field]),
                len(data_v3[field]),
                msg='Number of entities in {} differs for the export files'.
                format(field))

        # Count links per type; the start value of 2 for 'createlink' compensates
        # for the extra create-links in the AiiDA made export
        number_of_links_v2 = {
            'unspecified': 0,
            'createlink':
            2,  # There are two extra create-links in the AiiDA made export v0.3 file
            'returnlink': 0,
            'inputlink': 0,
            'calllink': 0
        }
        for link in data_v2['links_uuid']:
            number_of_links_v2[link['type']] += 1

        number_of_links_v3 = {
            'unspecified': 0,
            'createlink': 0,
            'returnlink': 0,
            'inputlink': 0,
            'calllink': 0
        }
        for link in data_v3['links_uuid']:
            number_of_links_v3[link['type']] += 1

        self.assertDictEqual(
            number_of_links_v2,
            number_of_links_v3,
            msg=
            'There are a different number of specific links in the migrated export file than the AiiDA made one.'
        )

        # Neither file should contain any 'unspecified' links
        self.assertEqual(number_of_links_v2['unspecified'], 0)
        self.assertEqual(number_of_links_v3['unspecified'], 0)

        # Special for data['export_data']['User']
        # There is an extra user in the AiiDA made export file
        self.assertEqual(
            len(data_v2['export_data']['User']) + 1,
            len(data_v3['export_data']['User']))