def test_migrations(migration_data, tmp_path):
    """Test each migration method from the `aiida.tools.importexport.archive.migrations` module."""
    version_old, (version_new, migration_method) = migration_data

    filepath_archive_new = get_archive_file(f'export_v{version_new}_simple.aiida', filepath='export/migrate')
    metadata_new = read_json_files(filepath_archive_new, names=['metadata.json'])[0]
    verify_metadata_version(metadata_new, version=version_new)
    data_new = read_json_files(filepath_archive_new, names=['data.json'])[0]

    filepath_archive_old = get_archive_file(f'export_v{version_old}_simple.aiida', filepath='export/migrate')

    out_path = tmp_path / 'out.aiida'
    with zipfile.ZipFile(filepath_archive_old, 'r', allowZip64=True) as handle:
        handle.extractall(out_path)
    folder = CacheFolder(out_path)
    migration_method(folder)

    _, metadata_old = folder.load_json('metadata.json')
    _, data_old = folder.load_json('data.json')

    verify_metadata_version(metadata_old, version=version_new)

    # Remove AiiDA version, since this may change regardless of the migration function
    metadata_old.pop('aiida_version')
    metadata_new.pop('aiida_version')

    # Assert conversion message in `metadata.json` is correct and then remove it for later assertions
    metadata_new.pop('conversion_info')
    message = f'Converted from version {version_old} to {version_new} with AiiDA v{get_version()}'
    assert metadata_old.pop('conversion_info')[-1] == message, 'Conversion message after migration is wrong'

    assert metadata_old == metadata_new
    assert data_old == data_new

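# The `migration_data` fixture consumed by `test_migrations` above is not shown in this
# section. A minimal sketch of what it could look like, assuming it parametrizes over the
# `MIGRATE_FUNCTIONS` mapping (old version -> (new version, migration function)) of
# `aiida.tools.importexport.archive.migrations`; the body below is an assumption, not the
# verbatim implementation:
@pytest.fixture(params=list(MIGRATE_FUNCTIONS.items()), ids=list(MIGRATE_FUNCTIONS))
def migration_data(request):
    """Yield one `(version_old, (version_new, migration_method))` pair per parametrization."""
    return request.param
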
def test_import_to_group(self):
    """Test import to an existing Group and that Nodes are added correctly.

    Covers multiple imports of the same archive, as well as imports of separate archives.
    """
    archives = [
        get_archive_file('arithmetic.add.aiida', filepath='calcjob'),
        get_archive_file(self.newest_archive, filepath=self.archive_path)
    ]

    group_label = 'import_madness'
    group = Group(group_label).store()
    self.assertTrue(group.is_empty, msg='The Group should be empty.')

    # Invoke `verdi import`, making sure there are no exceptions
    options = ['-G', group.label] + [archives[0]]
    result = self.cli_runner.invoke(cmd_import.cmd_import, options)
    self.assertIsNone(result.exception, msg=result.output)
    self.assertEqual(result.exit_code, 0, msg=result.output)

    self.assertFalse(group.is_empty, msg='The Group should no longer be empty.')
    nodes_in_group = group.count()

    # Invoke `verdi import` again, making sure the Group count does not change
    options = ['-G', group.label] + [archives[0]]
    result = self.cli_runner.invoke(cmd_import.cmd_import, options)
    self.assertIsNone(result.exception, msg=result.output)
    self.assertEqual(result.exit_code, 0, msg=result.output)
    self.assertEqual(
        group.count(),
        nodes_in_group,
        msg='The Group count should not have changed from {}. Instead it is now {}'.format(
            nodes_in_group, group.count()
        )
    )

    # Invoke `verdi import` again with a new archive, making sure the Group count increases
    options = ['-G', group.label] + [archives[1]]
    result = self.cli_runner.invoke(cmd_import.cmd_import, options)
    self.assertIsNone(result.exception, msg=result.output)
    self.assertEqual(result.exit_code, 0, msg=result.output)
    self.assertGreater(
        group.count(),
        nodes_in_group,
        msg='There should now be more than {} nodes in group {}, instead there are {}'.format(
            nodes_in_group, group_label, group.count()
        )
    )

def test_import_url_and_local_archives(self):
    """Test import of both a remote and local archive"""
    url_archive = 'export_v0.4_no_UPF.aiida'
    local_archive = self.newest_archive

    options = [
        get_archive_file(local_archive, filepath=self.archive_path),
        self.url_path + url_archive,
        get_archive_file(local_archive, filepath=self.archive_path)
    ]
    result = self.cli_runner.invoke(cmd_import.cmd_import, options)

    self.assertIsNone(result.exception, result.output)
    self.assertEqual(result.exit_code, 0, result.output)

def test_import_archive(self):
    """Test import of archive files from disk."""
    archives = [
        get_archive_file('arithmetic.add.aiida', filepath='calcjob'),
        get_archive_file(self.newest_archive, filepath=self.archive_path)
    ]

    options = list(archives)
    result = self.cli_runner.invoke(cmd_import.cmd_import, options)

    self.assertIsNone(result.exception, result.output)
    self.assertEqual(result.exit_code, 0, result.output)

def test_migrate_in_place(self):
    """Test that passing the -i/--in-place option will overwrite the passed file."""
    archive = 'export_v0.1_simple.aiida'
    target_version = '0.2'
    filename_input = get_archive_file(archive, filepath=self.fixture_archive)
    filename_tmp = next(tempfile._get_candidate_names())  # pylint: disable=protected-access

    try:
        # Copy the file: we do not want to overwrite the test data
        shutil.copy(filename_input, filename_tmp)

        # Specifying both an output file and in-place should raise
        options = [filename_tmp, '--in-place', '--output-file', 'test.aiida']
        result = self.cli_runner.invoke(cmd_export.migrate, options)
        self.assertIsNotNone(result.exception, result.output)

        # Specifying neither an output file nor in-place should raise
        options = [filename_tmp]
        result = self.cli_runner.invoke(cmd_export.migrate, options)
        self.assertIsNotNone(result.exception, result.output)

        # Check that in-place migration produces a valid archive in place of the old file
        options = [filename_tmp, '--in-place', '--version', target_version]
        result = self.cli_runner.invoke(cmd_export.migrate, options)
        self.assertIsNone(result.exception, result.output)
        self.assertTrue(os.path.isfile(filename_tmp))

        # Check that the files in the zip file are ok
        self.assertEqual(zipfile.ZipFile(filename_tmp).testzip(), None)
        with Archive(filename_tmp) as archive_object:
            self.assertEqual(archive_object.version_format, target_version)
    finally:
        os.remove(filename_tmp)

def test_context_required(self):
    """Verify that accessing a property of an Archive outside of a context manager raises."""
    with self.assertRaises(InvalidOperation):
        filepath = get_archive_file('export_v0.1_simple.aiida', filepath='export/migrate')
        archive = Archive(filepath)
        archive.version_format  # pylint: disable=pointless-statement

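# For contrast with `test_context_required`, a minimal sketch of the correct usage: inside
# the `with` block the same property access is valid (the `Archive` context manager is also
# used in the migration tests below). The helper name is hypothetical.
def _read_version_format(filepath):
    with Archive(filepath) as archive:
        return archive.version_format
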
def test_partial_migrations(self, core_archive, tmp_path):
    """Test migrations from a specific version (0.3) to other versions."""
    filepath_archive = get_archive_file('export_v0.3_simple.aiida', **core_archive)

    metadata = read_json_files(filepath_archive, names=['metadata.json'])[0]
    verify_metadata_version(metadata, version='0.3')

    migrator_cls = get_migrator(detect_archive_type(filepath_archive))
    migrator = migrator_cls(filepath_archive)

    with pytest.raises(TypeError, match='version must be a string'):
        migrator.migrate(0.2, tmp_path / 'v02.aiida')

    with pytest.raises(ArchiveMigrationError, match='No migration pathway available'):
        migrator.migrate('0.2', tmp_path / 'v02.aiida')

    # Same version migration: if no migration is performed, the output path is None
    out_path = migrator.migrate('0.3', tmp_path / 'v03.aiida')
    assert out_path is None

    # Newer version migration
    migrator.migrate('0.5', tmp_path / 'v05.aiida')
    assert (tmp_path / 'v05.aiida').exists()

    metadata = read_json_files(tmp_path / 'v05.aiida', names=['metadata.json'])[0]
    verify_metadata_version(metadata, version='0.5')

def test_import_make_new_group(self):
    """Make sure imported entities are saved in a new Group."""
    # Initialization
    group_label = 'new_group_for_verdi_import'
    archives = [get_archive_file(self.newest_archive, filepath=self.archive_path)]

    # Check the Group does not already exist
    group_search = Group.objects.find(filters={'label': group_label})
    self.assertEqual(
        len(group_search), 0, msg=f"A Group with label '{group_label}' already exists, but it should not."
    )

    # Invoke `verdi import`, making sure there are no exceptions
    options = ['-G', group_label] + archives
    result = self.cli_runner.invoke(cmd_archive.import_archive, options)
    self.assertIsNone(result.exception, msg=result.output)
    self.assertEqual(result.exit_code, 0, msg=result.output)

    # Make sure the new Group was created
    (group, new_group) = Group.objects.get_or_create(group_label)
    self.assertFalse(
        new_group, msg='The Group should not have been created now, but when the archive was imported.'
    )
    self.assertFalse(group.is_empty, msg='The Group should not be empty.')

def test_no_node_export(self, temp_dir):
    """Test migration of export file that has no Nodes"""
    input_file = get_archive_file('export_v0.3_no_Nodes.aiida', **self.external_archive)
    output_file = os.path.join(temp_dir, 'output_file.aiida')

    # Known entities
    computer_uuids = [self.computer.uuid]  # pylint: disable=no-member
    user_emails = [orm.User.objects.get_default().email]

    # Known export file content used for checks
    node_count = 0
    computer_count = 1 + 1  # localhost is always present
    computer_uuids.append('4f33c6fd-b624-47df-9ffb-a58f05d323af')
    user_emails.append('aiida@localhost')

    # Perform the migration
    migrate_archive(input_file, output_file)

    # Load the migrated file
    import_data(output_file, silent=True)

    # Check known number of entities is present
    self.assertEqual(orm.QueryBuilder().append(orm.Node).count(), node_count)
    self.assertEqual(orm.QueryBuilder().append(orm.Computer).count(), computer_count)

    # Check unique identifiers
    computers = orm.QueryBuilder().append(orm.Computer, project=['uuid']).all()[0][0]
    users = orm.QueryBuilder().append(orm.User, project=['email']).all()[0][0]
    self.assertIn(computers, computer_uuids)
    self.assertIn(users, user_emails)

def test_no_node_migration(self, tmp_path, external_archive):
    """Test migration of archive file that has no Node entities."""
    input_file = get_archive_file('export_v0.3_no_Nodes.aiida', **external_archive)
    output_file = tmp_path / 'output_file.aiida'

    migrator_cls = get_migrator(detect_archive_type(input_file))
    migrator = migrator_cls(input_file)

    # Perform the migration
    migrator.migrate(newest_version, output_file)

    # Load the migrated file
    import_data(output_file)

    # Check known entities
    assert orm.QueryBuilder().append(orm.Node).count() == 0
    computer_query = orm.QueryBuilder().append(orm.Computer, project=['uuid'])
    assert computer_query.all(flat=True) == ['4f33c6fd-b624-47df-9ffb-a58f05d323af']
    user_query = orm.QueryBuilder().append(orm.User, project=['email'])
    assert set(user_query.all(flat=True)) == {orm.User.objects.get_default().email, 'aiida@localhost'}

def test_migrate_recursively_specific_version(self):
    """Test the `version` argument of the `migrate_recursively` function."""
    filepath_archive = get_archive_file('export_v0.3_simple.aiida', **self.core_archive)

    with Archive(filepath_archive) as archive:

        # Incorrect type
        with self.assertRaises(TypeError):
            migrate_recursively(archive.meta_data, archive.data, None, version=0.2)

        # Backward migrations are not supported
        with self.assertRaises(ArchiveMigrationError):
            migrate_recursively(archive.meta_data, archive.data, None, version='0.2')

        migrate_recursively(archive.meta_data, archive.data, None, version='0.3')

        migrated_version = '0.5'
        version = migrate_recursively(archive.meta_data, archive.data, None, version=migrated_version)
        self.assertEqual(version, migrated_version)

def test_import_folder(self):
    """Verify that a pre-extracted archive (i.e. a folder with the archive structure) can be imported.

    It is important to check that neither the source directory nor any of its contents are deleted after import.
    """
    from aiida.common.folders import SandboxFolder
    from tests.utils.archives import get_archive_file
    from aiida.tools.importexport.common.archive import extract_zip

    archive = get_archive_file('arithmetic.add.aiida', filepath='calcjob')

    with SandboxFolder() as temp_dir:
        extract_zip(archive, temp_dir, silent=True)

        # Make sure the JSON files and the nodes subfolder were correctly extracted (are present),
        # then try to import by passing the extracted folder to the import function.
        for name in {'metadata.json', 'data.json', 'nodes'}:
            self.assertTrue(os.path.exists(os.path.join(temp_dir.abspath, name)))

        # Get a list of all folders in the extracted archive
        org_folders = []
        for dirpath, dirnames, _ in os.walk(temp_dir.abspath):
            org_folders += [os.path.join(dirpath, dirname) for dirname in dirnames]

        import_data(temp_dir.abspath, silent=True)

        # Check that nothing from the source was deleted
        src_folders = []
        for dirpath, dirnames, _ in os.walk(temp_dir.abspath):
            src_folders += [os.path.join(dirpath, dirname) for dirname in dirnames]
        self.maxDiff = None  # pylint: disable=invalid-name
        self.assertListEqual(org_folders, src_folders)

def test_v02_to_newest(self, temp_dir):
    """Test migration of exported files from v0.2 to the newest export version."""
    # Get the export file with export version 0.2
    input_file = get_archive_file('export_v0.2.aiida', **self.external_archive)
    output_file = os.path.join(temp_dir, 'output_file.aiida')

    # Perform the migration
    migrate_archive(input_file, output_file)
    metadata, _ = get_json_files(output_file)
    verify_metadata_version(metadata, version=newest_version)

    # Load the migrated file
    import_data(output_file, silent=True)

    # Do the necessary checks
    self.assertEqual(orm.QueryBuilder().append(orm.Node).count(), self.node_count)

    # Verify that CalculationNodes have non-empty attribute dictionaries
    builder = orm.QueryBuilder().append(orm.CalculationNode)
    for [calculation] in builder.iterall():
        self.assertIsInstance(calculation.attributes, dict)
        self.assertNotEqual(len(calculation.attributes), 0)

    # Verify that the StructureData nodes maintained their label, cell, and kinds
    builder = orm.QueryBuilder().append(orm.StructureData)
    self.assertEqual(
        builder.count(),
        self.struct_count,
        msg='There should be {} StructureData nodes, instead {} were found'.format(
            self.struct_count, builder.count()
        )
    )
    for structures in builder.all():
        structure = structures[0]
        self.assertEqual(structure.label, self.known_struct_label)
        self.assertEqual(structure.cell, self.known_cell)

    builder = orm.QueryBuilder().append(orm.StructureData, project=['attributes.kinds'])
    for [kinds] in builder.iterall():
        self.assertEqual(len(kinds), len(self.known_kinds))
        for kind in kinds:
            self.assertIn(kind, self.known_kinds, msg="Kind '{}' not found in: {}".format(kind, self.known_kinds))

    # Check that there is a StructureData that is an input of a CalculationNode
    builder = orm.QueryBuilder()
    builder.append(orm.StructureData, tag='structure')
    builder.append(orm.CalculationNode, with_incoming='structure')
    self.assertGreater(len(builder.all()), 0)

    # Check that there is a RemoteData that is the output of a CalculationNode
    builder = orm.QueryBuilder()
    builder.append(orm.CalculationNode, tag='parent')
    builder.append(orm.RemoteData, with_incoming='parent')
    self.assertGreater(len(builder.all()), 0)

def test_migrate_v3_to_v4(self):
    """Test function migrate_v3_to_v4."""
    from aiida import get_version

    # Get metadata.json and data.json as dicts from the v0.4 archive
    metadata_v4, data_v4 = get_json_files('export_v0.4_simple.aiida', **self.core_archive)
    verify_metadata_version(metadata_v4, version='0.4')

    # Get metadata.json and data.json as dicts from the v0.3 archive.
    # Cannot use 'get_json_files' for 'export_v0.3_simple.aiida',
    # because we need to pass the SandboxFolder to 'migrate_v3_to_v4'.
    dirpath_archive = get_archive_file('export_v0.3_simple.aiida', **self.core_archive)

    with SandboxFolder(sandbox_in_repo=False) as folder:
        if zipfile.is_zipfile(dirpath_archive):
            extract_zip(dirpath_archive, folder, silent=True)
        elif tarfile.is_tarfile(dirpath_archive):
            extract_tar(dirpath_archive, folder, silent=True)
        else:
            raise ValueError('invalid file format, expected either a zip archive or gzipped tarball')

        try:
            with open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                data_v3 = jsonload(fhandle)
            with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle:
                metadata_v3 = jsonload(fhandle)
        except IOError:
            raise NotExistent('export archive does not contain the required file {}'.format(fhandle.filename))

        verify_metadata_version(metadata_v3, version='0.3')

        # Migrate to v0.4
        migrate_v3_to_v4(metadata_v3, data_v3, folder)
        verify_metadata_version(metadata_v3, version='0.4')

    # Remove AiiDA version, since this may change regardless of the migration function
    metadata_v3.pop('aiida_version')
    metadata_v4.pop('aiida_version')

    # Assert the conversion message in `metadata.json` is correct, then remove it for later assertions
    self.maxDiff = None  # pylint: disable=invalid-name
    conversion_message = 'Converted from version 0.3 to 0.4 with AiiDA v{}'.format(get_version())
    self.assertEqual(
        metadata_v3.pop('conversion_info')[-1],
        conversion_message,
        msg='The conversion message after migration is wrong'
    )
    metadata_v4.pop('conversion_info')

    # Assert changes were performed correctly
    self.assertDictEqual(
        metadata_v3,
        metadata_v4,
        msg='After migration, metadata.json should equal intended metadata.json from archives'
    )
    self.assertDictEqual(
        data_v3, data_v4, msg='After migration, data.json should equal intended data.json from archives'
    )

def test_non_interactive_and_migration(self):
    """Test options `--non-interactive` and `--migration`/`--no-migration`.

    `migration` = True (default), `non_interactive` = False (default): Expected: Query user, migrate
    `migration` = True (default), `non_interactive` = True: Expected: No query, migrate
    `migration` = False, `non_interactive` = False (default): Expected: No query, no migrate
    `migration` = False, `non_interactive` = True: Expected: No query, no migrate
    """
    archive = get_archive_file('export_v0.1_simple.aiida', filepath=self.archive_path)
    success_message = f'Success: imported archive {archive}'

    # Import "normally", but explicitly specifying `--migration`; make sure the confirm message is present.
    # `migration` = True (default), `non_interactive` = False (default): Expected: Query user, migrate
    options = ['--migration', archive]
    result = self.cli_runner.invoke(cmd_archive.import_archive, options)
    self.assertIsNone(result.exception, msg=result.output)
    self.assertEqual(result.exit_code, 0, msg=result.output)
    self.assertIn('trying migration', result.output, msg=result.exception)
    self.assertIn(success_message, result.output, msg=result.exception)

    # Import using non-interactive; make sure the confirm message is gone.
    # `migration` = True (default), `non_interactive` = True: Expected: No query, migrate
    options = ['--non-interactive', archive]
    result = self.cli_runner.invoke(cmd_archive.import_archive, options)
    self.assertIsNone(result.exception, msg=result.output)
    self.assertEqual(result.exit_code, 0, msg=result.output)
    self.assertIn(success_message, result.output, msg=result.exception)

    # Import using `--no-migration`; make sure the confirm message is gone.
    # `migration` = False, `non_interactive` = False (default): Expected: No query, no migrate
    options = ['--no-migration', archive]
    result = self.cli_runner.invoke(cmd_archive.import_archive, options)
    self.assertIsNotNone(result.exception, msg=result.output)
    self.assertNotEqual(result.exit_code, 0, msg=result.output)
    self.assertNotIn('trying migration', result.output, msg=result.exception)
    self.assertNotIn(success_message, result.output, msg=result.exception)

    # Import using `--no-migration` and `--non-interactive`; make sure the confirm message is gone.
    # `migration` = False, `non_interactive` = True: Expected: No query, no migrate
    options = ['--no-migration', '--non-interactive', archive]
    result = self.cli_runner.invoke(cmd_archive.import_archive, options)
    self.assertIsNotNone(result.exception, msg=result.output)
    self.assertNotEqual(result.exit_code, 0, msg=result.output)
    self.assertNotIn('trying migration', result.output, msg=result.exception)
    self.assertNotIn(success_message, result.output, msg=result.exception)

def test_inspect_empty_archive(self):
    """Test the functionality of `verdi export inspect` for an empty archive."""
    filename_input = get_archive_file('empty.aiida', filepath=self.fixture_archive)

    options = [filename_input]
    result = self.cli_runner.invoke(cmd_archive.inspect, options)
    self.assertIsNotNone(result.exception, result.output)
    self.assertIn('corrupt archive', result.output)

def test_comment_mode(self):
    """Test toggling comment mode flag"""
    archives = [get_archive_file(self.newest_archive, filepath=self.archive_path)]
    for mode in ['newest', 'overwrite']:
        options = ['--comment-mode', mode] + archives
        result = self.cli_runner.invoke(cmd_import.cmd_import, options)
        self.assertIsNone(result.exception, result.output)
        self.assertIn('Comment mode: {}'.format(mode), result.output)
        self.assertEqual(result.exit_code, 0, result.output)

def test_wrong_versions(self, core_archive, tmp_path, version):
    """Test correct errors are raised if archive files have wrong version numbers"""
    filepath_archive = get_archive_file('export_v0.1_simple.aiida', **core_archive)
    migrator_cls = get_migrator(detect_archive_type(filepath_archive))
    migrator = migrator_cls(filepath_archive)

    with pytest.raises(ArchiveMigrationError, match='No migration pathway available'):
        migrator.migrate(version, tmp_path / 'out.aiida')
    assert not (tmp_path / 'out.aiida').exists()

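# The `version` argument of `test_wrong_versions` is assumed to be supplied by a
# `pytest.mark.parametrize` decorator on the test; the concrete values are illustrative only:
#
#     @pytest.mark.parametrize('version', ('0.0', '0.1.0', '0.99'))
#     def test_wrong_versions(self, core_archive, tmp_path, version):
#         ...
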
def test_migrate_versions_recent(self):
    """Migrating an archive with the current version should exit with non-zero status."""
    filename_input = get_archive_file(self.newest_archive, filepath=self.fixture_archive)
    filename_output = next(tempfile._get_candidate_names())  # pylint: disable=protected-access

    try:
        options = [filename_input, filename_output]
        result = self.cli_runner.invoke(cmd_export.migrate, options)
        self.assertIsNotNone(result.exception)
    finally:
        delete_temporary_file(filename_output)

def test_migrate_tar_gz(self):
    """Test that -F/--archive-format option can be used to write a tar.gz instead."""
    filename_input = get_archive_file(self.penultimate_archive, filepath=self.fixture_archive)
    filename_output = next(tempfile._get_candidate_names())  # pylint: disable=protected-access

    for option in ['-F', '--archive-format']:
        try:
            options = [option, 'tar.gz', filename_input, filename_output]
            result = self.cli_runner.invoke(cmd_archive.migrate, options)
            self.assertIsNone(result.exception, result.output)
            self.assertTrue(os.path.isfile(filename_output))
            self.assertTrue(tarfile.is_tarfile(filename_output))
        finally:
            delete_temporary_file(filename_output)

def test_import_old_local_archives(self):
    """Test import of old local archives.

    Expected behavior: Automatically migrate to newest version and import correctly.
    """
    archives = []
    for version in range(1, int(EXPORT_VERSION.split('.')[-1]) - 1):
        archives.append(('export_v0.{}_simple.aiida'.format(version), '0.{}'.format(version)))

    for archive, version in archives:
        options = [get_archive_file(archive, filepath=self.archive_path)]
        result = self.cli_runner.invoke(cmd_import.cmd_import, options)

        self.assertIsNone(result.exception, msg=result.output)
        self.assertEqual(result.exit_code, 0, msg=result.output)
        self.assertIn(version, result.output, msg=result.exception)
        self.assertIn('Success: imported archive {}'.format(options[0]), result.output, msg=result.exception)

def test_migrate_v5_to_v6_calc_states(core_archive, migrate_from_func):
    """Test the data migration of legacy `JobCalcState` attributes.

    This test has to use a local archive because the current archive from the
    `aiida-export-migration-tests` module does not include a `CalcJobNode` with a legacy
    `state` attribute.
    """
    # Get metadata.json and data.json as dicts from the v0.5 archive
    archive_path = get_archive_file('export_v0.5_simple.aiida', **core_archive)
    metadata, data = read_json_files(archive_path)  # pylint: disable=unbalanced-tuple-unpacking
    verify_metadata_version(metadata, version='0.5')

    calc_job_node_type = 'process.calculation.calcjob.CalcJobNode.'
    node_data = data['export_data'].get('Node', {})
    node_attributes = data['node_attributes']
    calc_jobs = {}
    for pk, values in node_data.items():
        if values['node_type'] == calc_job_node_type and 'state' in node_attributes.get(pk, {}):
            calc_jobs[pk] = node_attributes[pk]['state']

    # Migrate to v0.6
    metadata, data = migrate_from_func('export_v0.5_simple.aiida', '0.5', '0.6', migrate_v5_to_v6, core_archive)
    verify_metadata_version(metadata, version='0.6')

    node_attributes = data['node_attributes']

    # The export archive contains a single `CalcJobNode` that had `state=FINISHED`.
    for pk, state in calc_jobs.items():
        attributes = node_attributes[pk]

        if STATE_MAPPING[state].exit_status is not None:
            assert attributes['exit_status'] == STATE_MAPPING[state].exit_status

        if STATE_MAPPING[state].process_state is not None:
            assert attributes['process_state'] == STATE_MAPPING[state].process_state

        if STATE_MAPPING[state].process_status is not None:
            assert attributes['process_status'] == STATE_MAPPING[state].process_status

        assert attributes['process_label'] == 'Legacy JobCalculation'

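# `STATE_MAPPING` used above maps each legacy `JobCalcState` string to the process attributes
# that replace it. A sketch of the assumed shape, with field names taken from the attribute
# accesses in the test and a purely illustrative entry:
#
#     StateMapping = namedtuple('StateMapping', ['state', 'process_state', 'exit_status', 'process_status'])
#     STATE_MAPPING = {
#         'FINISHED': StateMapping('FINISHED', 'finished', 0, None),
#         ...
#     }
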
def test_comment_mode(self):
    """Test toggling comment mode flag"""
    import re
    archives = [get_archive_file(self.newest_archive, filepath=self.archive_path)]
    for mode in ['newest', 'overwrite']:
        options = ['--comment-mode', mode] + archives
        result = self.cli_runner.invoke(cmd_import.cmd_import, options)
        self.assertIsNone(result.exception, result.output)
        self.assertTrue(
            any(re.fullmatch(r'Comment rules[\s]*{}'.format(mode), line) for line in result.output.split('\n')),
            msg=f'Mode: {mode}. Output: {result.output}'
        )
        self.assertEqual(result.exit_code, 0, result.output)

def test_migrate_recursively(self):
    """Test function 'migrate_recursively'"""
    import tarfile
    import zipfile

    from aiida.common.exceptions import NotExistent
    from aiida.common.folders import SandboxFolder
    from aiida.common.json import load as jsonload
    from aiida.tools.importexport.common.archive import extract_tar, extract_zip

    # Get metadata.json and data.json as dicts from the v0.1 archive.
    # Cannot use 'get_json_files' for 'export_v0.1_simple.aiida',
    # because we need to pass the SandboxFolder to 'migrate_recursively'.
    dirpath_archive = get_archive_file('export_v0.1_simple.aiida', **self.core_archive)

    with SandboxFolder(sandbox_in_repo=False) as folder:
        if zipfile.is_zipfile(dirpath_archive):
            extract_zip(dirpath_archive, folder, silent=True)
        elif tarfile.is_tarfile(dirpath_archive):
            extract_tar(dirpath_archive, folder, silent=True)
        else:
            raise ValueError('invalid file format, expected either a zip archive or gzipped tarball')

        try:
            with open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                data = jsonload(fhandle)
            with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle:
                metadata = jsonload(fhandle)
        except IOError:
            raise NotExistent('export archive does not contain the required file {}'.format(fhandle.filename))

        verify_metadata_version(metadata, version='0.1')

        # Migrate to newest version
        new_version = migrate_recursively(metadata, data, folder)
        verify_metadata_version(metadata, version=newest_version)
        self.assertEqual(new_version, newest_version)

def test_migrate_silent(self):
    """Test that the captured output is an empty string when the -s/--silent option is passed."""
    filename_input = get_archive_file(self.penultimate_archive, filepath=self.fixture_archive)
    filename_output = next(tempfile._get_candidate_names())  # pylint: disable=protected-access

    for option in ['-s', '--silent']:
        try:
            options = [option, filename_input, filename_output]
            result = self.cli_runner.invoke(cmd_export.migrate, options)
            self.assertEqual(result.output, '')
            self.assertIsNone(result.exception, result.output)
            self.assertTrue(os.path.isfile(filename_output))
            self.assertEqual(zipfile.ZipFile(filename_output).testzip(), None)
        finally:
            delete_temporary_file(filename_output)

def test_full_migration(self, tmp_path, core_archive):
    """Test a migration from the first to newest archive version."""
    filepath_archive = get_archive_file('export_v0.1_simple.aiida', **core_archive)

    metadata = read_json_files(filepath_archive, names=['metadata.json'])[0]
    verify_metadata_version(metadata, version='0.1')

    migrator_cls = get_migrator(detect_archive_type(filepath_archive))
    migrator = migrator_cls(filepath_archive)

    migrator.migrate(newest_version, tmp_path / 'out.aiida')
    assert detect_archive_type(tmp_path / 'out.aiida') == 'zip'
    metadata = read_json_files(tmp_path / 'out.aiida', names=['metadata.json'])[0]
    verify_metadata_version(metadata, version=newest_version)

def test_tar_migration(self, tmp_path, core_archive):
    """Test a migration using a tar compressed in/out file."""
    filepath_archive = get_archive_file('export_v0.2_simple.tar.gz', **core_archive)

    metadata = read_json_files(filepath_archive, names=['metadata.json'])[0]
    verify_metadata_version(metadata, version='0.2')

    migrator_cls = get_migrator(detect_archive_type(filepath_archive))
    migrator = migrator_cls(filepath_archive)

    migrator.migrate(newest_version, tmp_path / 'out.aiida', out_compression='tar.gz')
    assert detect_archive_type(tmp_path / 'out.aiida') == 'tar.gz'
    metadata = read_json_files(tmp_path / 'out.aiida', names=['metadata.json'])[0]
    verify_metadata_version(metadata, version=newest_version)

def migration_data(request):
    """For a given tuple of two subsequent versions and the corresponding migration method,
    yield the old and new versions together with the migrated and reference metadata and data.
    """
    version_old, version_new, migration_method = request.param

    filepath_archive = 'export_v{}_simple.aiida'.format(version_new)
    metadata_new, data_new = get_json_files(filepath_archive, filepath='export/migrate')
    verify_metadata_version(metadata_new, version=version_new)

    filepath_archive = get_archive_file('export_v{}_simple.aiida'.format(version_old), filepath='export/migrate')

    with Archive(filepath_archive) as archive:
        metadata_old = copy.deepcopy(archive.meta_data)
        data_old = copy.deepcopy(archive.data)

        migration_method(metadata_old, data_old, archive.folder)
        verify_metadata_version(metadata_old, version=version_new)

    yield version_old, version_new, metadata_old, metadata_new, data_old, data_new

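# The `migration_data` fixture above is assumed to be parametrized over
# `(version_old, version_new, migration_method)` triples via a decorator of roughly this
# form (the values shown are illustrative only):
#
#     @pytest.fixture(params=[('0.1', '0.2', migrate_v1_to_v2), ('0.2', '0.3', migrate_v2_to_v3), ...])
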
def test_migrate_force(self):
    """Test that passing the -f/--force option will overwrite the output file even if it exists."""
    filename_input = get_archive_file(self.penultimate_archive, filepath=self.fixture_archive)

    # Using the context manager will create the file and so the command should fail
    with tempfile.NamedTemporaryFile() as file_output:
        options = [filename_input, file_output.name]
        result = self.cli_runner.invoke(cmd_archive.migrate, options)
        self.assertIsNotNone(result.exception)

    for option in ['-f', '--force']:
        # Using the context manager will create the file, but we pass the force flag so it should work
        with tempfile.NamedTemporaryFile() as file_output:
            filename_output = file_output.name
            options = [option, filename_input, filename_output]
            result = self.cli_runner.invoke(cmd_archive.migrate, options)
            self.assertIsNone(result.exception, result.output)
            self.assertTrue(os.path.isfile(filename_output))
            self.assertEqual(zipfile.ZipFile(filename_output).testzip(), None)

def test_migrate_versions_old(self):
    """Migrating archives with a version older than the current should work."""
    archives = []
    for version in range(1, int(EXPORT_VERSION.split('.')[-1]) - 1):
        archives.append('export_v0.{}_simple.aiida'.format(version))

    for archive in archives:
        filename_input = get_archive_file(archive, filepath=self.fixture_archive)
        filename_output = next(tempfile._get_candidate_names())  # pylint: disable=protected-access

        try:
            options = [filename_input, filename_output]
            result = self.cli_runner.invoke(cmd_export.migrate, options)
            self.assertIsNone(result.exception, result.output)
            self.assertTrue(os.path.isfile(filename_output))
            self.assertEqual(zipfile.ZipFile(filename_output).testzip(), None)
        finally:
            delete_temporary_file(filename_output)