Example #1
    def test_prepare(self):
        calc, inp = self._get_calc()
        with SandboxFolder() as sf:
            ci = calc._prepare_for_submission(sf, inp)
            il = sf.get_content_list()
        self.assertEqual(
            set(il), {
                'INCAR', 'KPOINTS', 'POSCAR', 'POTCAR', 'CHGCAR', 'WAVECAR',
                'wannier90.win', 'test1', 'test2'
            })
        self.assertIn(['wannier90*', '.', 0], ci.retrieve_list)
        calc.use_settings(Common.settings())
        inp = calc.get_inputs_dict()
        calc.verify_inputs(inp)
        with SandboxFolder() as sf:
            calc._prepare_for_submission(sf, inp)
            il = sf.get_content_list()
        self.assertEqual(
            set(il), {
                'INCAR', 'KPOINTS', 'POSCAR', 'POTCAR', 'WAVECAR',
                'wannier90.win', 'test1', 'test2'
            })
        calc, inp = self._get_calc(no_wdat=True)
        with SandboxFolder() as sf:
            ci = calc._prepare_for_submission(sf, inp)
            il = sf.get_content_list()
        self.assertEqual(
            set(il), {
                'INCAR', 'KPOINTS', 'POSCAR', 'POTCAR', 'CHGCAR', 'WAVECAR',
                'wannier90.win'
            })
Example #2
    def test_dangling_link_to_existing_db_node(self, temp_dir):
        """A dangling link that references a Node that is not included in the archive should `not` be importable"""
        struct = orm.StructureData()
        struct.store()
        struct_uuid = struct.uuid

        calc = orm.CalculationNode()
        calc.add_incoming(struct, LinkType.INPUT_CALC, 'input')
        calc.store()
        calc.seal()
        calc_uuid = calc.uuid

        filename = os.path.join(temp_dir, 'export.aiida')
        export([struct], filename=filename, file_format='tar.gz')

        unpack = SandboxFolder()
        with tarfile.open(filename, 'r:gz', format=tarfile.PAX_FORMAT) as tar:
            tar.extractall(unpack.abspath)

        with open(unpack.get_abs_path('data.json'), 'r',
                  encoding='utf8') as fhandle:
            data = json.load(fhandle)
        data['links_uuid'].append({
            'output': calc.uuid,
            'input': struct.uuid,
            'label': 'input',
            'type': LinkType.INPUT_CALC.value
        })

        # Text mode: json.dump writes str, so 'wb' would raise a TypeError here
        with open(unpack.get_abs_path('data.json'), 'w', encoding='utf8') as fhandle:
            json.dump(data, fhandle)

        with tarfile.open(filename, 'w:gz', format=tarfile.PAX_FORMAT) as tar:
            tar.add(unpack.abspath, arcname='')

        # Make sure the CalculationNode is still in the database
        builder = orm.QueryBuilder().append(orm.CalculationNode,
                                            project='uuid')
        self.assertEqual(
            builder.count(),
            1,
            msg=
            f'There should be a single CalculationNode, instead {builder.count()} has been found'
        )
        self.assertEqual(builder.all()[0][0], calc_uuid)

        with self.assertRaises(DanglingLinkError):
            import_data(filename)

        # Using the flag `ignore_unknown_nodes` should import it without problems
        import_data(filename, ignore_unknown_nodes=True)
        builder = orm.QueryBuilder().append(orm.StructureData, project='uuid')
        self.assertEqual(
            builder.count(),
            1,
            msg=
            f'There should be a single StructureData, instead {builder.count()} has been found'
        )
        self.assertEqual(builder.all()[0][0], struct_uuid)
Example #3
def immigrate_existing(builder, remote_data, seal=True):
    """Immigrate a Calculation that was not run using AiiDa.

    :param builder: a populated builder instance for a CalcJob
    :type builder: aiida.engine.processes.builder.ProcessBuilder
    :param remote_data: a remote data folder,
        containing the output files required for parsing
    :type remote_data: aiida.orm.RemoteData
    :param seal: whether to seal the calc node against further attribute changes
    :type seal: bool

    :rtype: aiida.orm.CalcJobNode

    """
    # initialise calcjob
    runner = get_manager().get_runner()
    pw_calc_cls = builder._process_class
    process = instantiate_process(runner, pw_calc_cls, **builder)
    calc_node = process.node

    # prepare for submission
    with SandboxFolder() as temp_folder:
        calc_info = process.presubmit(temp_folder)  # noqa: F841
        calc_node.put_object_from_tree(temp_folder.abspath, force=True)

    # link remote folder to calc_node
    if not remote_data.is_stored:
        remote_data.store()
    remote_data.add_incoming(
        calc_node, link_type=LinkType.CREATE, link_label="remote_folder"
    )
    calc_node.set_remote_workdir(remote_data.get_remote_path())
    transport = remote_data.computer.get_transport()

    with SandboxFolder() as temp_retrieved:
        # retrieved output files
        retrieve_calculation(calc_node, transport, temp_retrieved.abspath)
        # parse output
        calc_node.set_state(CalcJobState.PARSING)
        exit_code = process.parse(temp_retrieved.abspath)
    # link outgoing nodes
    process.update_outputs()

    # finalise calc node
    calc_node.delete_state()
    calc_node.delete_checkpoint()
    calc_node.set_process_state(ProcessState.FINISHED)
    calc_node.set_exit_status(exit_code.status)
    calc_node.set_exit_message(exit_code.message)
    if seal:
        calc_node.seal()

    # record that the node was created via immigration
    calc_node.set_extra("immigrated", True)
    calc_node.set_extra("immigration_func", __name__)

    return calc_node
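
A minimal usage sketch for the function above; the code label, the plugin entry point, and the remote path are hypothetical placeholders and must be adapted to the actual setup.

# Hedged usage sketch for immigrate_existing(); every concrete name below
# (code label, plugin entry point, remote path) is a hypothetical placeholder.
from aiida import orm
from aiida.plugins import CalculationFactory

code = orm.load_code('pw@my_cluster')  # hypothetical code label
builder = CalculationFactory('quantumespresso.pw').get_builder()
builder.code = code
# ... populate the remaining inputs required by the CalcJob here ...

remote = orm.RemoteData(remote_path='/scratch/finished_run', computer=code.computer)
calc_node = immigrate_existing(builder, remote)
print(calc_node.exit_status, calc_node.get_extra('immigrated'))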
Example #4
    def _get_temp_folder(self):
        """
        Get the folder of the Node in the temporary repository.

        :return: a SandboxFolder object mapping the node in the repository.
        """
        # Create the temp folder only on its first use
        if self._temp_folder is None:
            self._temp_folder = SandboxFolder()
            # Create the 'path' subfolder in the sandbox
            self._get_folder_pathsubfolder.create()
        return self._temp_folder
Example #5
    def test_3(self):
        """
        Test importing of nodes, that have links to unknown nodes.
        """
        import json
        import tarfile
        import os
        import shutil
        import tempfile

        from aiida.orm import DataFactory
        from aiida.orm.importexport import export
        from aiida.common.folders import SandboxFolder

        # Creating a folder for the import/export files
        temp_folder = tempfile.mkdtemp()
        try:
            StructureData = DataFactory('structure')
            sd = StructureData()
            sd.store()

            filename = os.path.join(temp_folder, "export.tar.gz")
            export([sd.dbnode], outfile=filename, silent=True)

            unpack = SandboxFolder()
            with tarfile.open(filename, "r:gz",
                              format=tarfile.PAX_FORMAT) as tar:
                tar.extractall(unpack.abspath)

            with open(unpack.get_abs_path('data.json'), 'r') as f:
                metadata = json.load(f)
            metadata['links_uuid'].append({
                'output': sd.uuid,
                'input': 'non-existing-uuid',
                'label': 'parent'
            })
            with open(unpack.get_abs_path('data.json'), 'w') as f:
                json.dump(metadata, f)

            with tarfile.open(filename, "w:gz",
                              format=tarfile.PAX_FORMAT) as tar:
                tar.add(unpack.abspath, arcname="")

            self.clean_db()

            with self.assertRaises(ValueError):
                import_data(filename, silent=True)

            import_data(filename, ignore_unknown_nodes=True, silent=True)
        finally:
            # Deleting the created temporary folder
            shutil.rmtree(temp_folder, ignore_errors=True)
Example #6
    def test_prepare(self):
        calc, inp = self._get_calc()
        with SandboxFolder() as sf:
            ci = calc._prepare_for_submission(sf, inp)
        self.assertIn('CHGCAR', ci.retrieve_list)
        self.assertIn('WAVECAR', ci.retrieve_list)
        self.assertIn('IBZKPT', ci.retrieve_list)
Example #7
    def test_import_folder(self):
        """Verify a pre-extracted archive (aka. a folder with the archive structure) can be imported.

        It is important to check that the source directory or any of its contents are not deleted after import.
        """
        from aiida.common.folders import SandboxFolder
        from tests.utils.archives import get_archive_file
        from aiida.tools.importexport.common.archive import extract_zip

        archive = get_archive_file('arithmetic.add.aiida', filepath='calcjob')

        with SandboxFolder() as temp_dir:
            extract_zip(archive, temp_dir, silent=True)

            # Make sure the JSON files and the nodes subfolder were correctly extracted (are present),
            # then try to import it by passing the extracted folder to the import function.
            for name in {'metadata.json', 'data.json', 'nodes'}:
                self.assertTrue(os.path.exists(os.path.join(temp_dir.abspath, name)))

            # Get list of all folders in extracted archive
            org_folders = []
            for dirpath, dirnames, _ in os.walk(temp_dir.abspath):
                org_folders += [os.path.join(dirpath, dirname) for dirname in dirnames]

            import_data(temp_dir.abspath, silent=True)

            # Check nothing from the source was deleted
            src_folders = []
            for dirpath, dirnames, _ in os.walk(temp_dir.abspath):
                src_folders += [os.path.join(dirpath, dirname) for dirname in dirnames]
            self.maxDiff = None  # pylint: disable=invalid-name
            self.assertListEqual(org_folders, src_folders)
Example #8
def kick_out_corestates_wf(potential_sfd, emin):
    """
    Workfunction that kicks out all core states from single file data potential that are higher than emin.
    :param potential_sfd: SingleFileData type of potential
    :param emin: Energy threshold above which all core states are removed from potential (Float)
    :returns: potential without core states higher than emin (SingleFileData)
    """
    from aiida.common.folders import SandboxFolder
    from aiida.plugins import DataFactory

    SingleFileData = DataFactory('singlefile')

    with SandboxFolder() as tmpdir:
        with tmpdir.open('potential_deleted_core_states', 'w') as potfile_out:
            with potential_sfd.open(potential_sfd.filename) as potfile_in:
                num_deleted = kick_out_corestates(potfile_in, potfile_out,
                                                  emin)
        # store new potential as single file data object
        if num_deleted > 0:
            with tmpdir.open('potential_deleted_core_states') as potfile_out:
                potential_nocore_sfd = SingleFileData(file=potfile_out)

    # return potential
    if num_deleted > 0:
        return potential_nocore_sfd
    else:
        return potential_sfd.clone()
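
A short, hedged sketch of calling the workfunction above; the input file path is a hypothetical placeholder.

# Hedged usage sketch; the file path is hypothetical.
from aiida.orm import Float
from aiida.plugins import DataFactory

SingleFileData = DataFactory('singlefile')
potential = SingleFileData(file='/path/to/potential_file')  # hypothetical input
cleaned = kick_out_corestates_wf(potential, Float(-1.0))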
Example #9
    def test_inputs(self):

        parameters = ParameterData(dict={
            'INPUTPH': {
                'tr2_ph': 1.0e-8,
            }})

        qpoints = KpointsData()
        qpoints.set_kpoints_mesh([1, 1, 1])
        settings = None

        builder = PhCalc.get_builder()
        builder.code = self.code
        builder.qpoints = qpoints
        builder.settings = settings

        with SandboxFolder() as f:
            # I use the same SandboxFolder more than once because nothing
            # should be written for these failing tests

            # Missing required input nodes
            with self.assertRaises(InputValidationError):
                builder.submit_test(folder=f)
            builder.parameters = parameters

            builder.submit_test(folder=f)
Example #10
def fixture_remotedata(fixture_localhost, shared_datadir):
    """
    Return a `RemoteData` with contents from the specified directory. Optionally a
    mapping of strings to replace in the filenames can be passed. Note that the order
    of replacement is not guaranteed.
    
    The RemoteData node is yielded and points to a folder in /tmp, and is removed at the end
    """
    from aiida.orm import RemoteData
    from aiida.common.folders import SandboxFolder

    replacement_mapping = {'gaas': 'aiida'}
    dir_path = str(
        shared_datadir /
        'gaas')  # TODO: Remove cast to 'str' when Python2 support is dropped.

    # TODO: replace with tempfile.TemporaryDirectory when Python2 support is
    # dropped. Note that some things will change, e.g. sandbox.abspath
    # becomes tempdir.name, or similary `insert_path` needs to be changed.
    with SandboxFolder() as sandbox:
        remote = RemoteData(remote_path=sandbox.abspath,
                            computer=fixture_localhost)
        for file_path in os.listdir(dir_path):
            abs_path = os.path.abspath(os.path.join(dir_path, file_path))
            res_file_path = file_path
            for old, new in replacement_mapping.items():
                res_file_path = res_file_path.replace(old, new)
            sandbox.insert_path(abs_path, res_file_path)
        yield remote
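
A sketch of a test consuming the fixture above, assuming it is registered with @pytest.fixture (e.g. in a conftest.py):

# Hedged usage sketch; assumes the fixture is registered via @pytest.fixture.
def test_remotedata_contents(fixture_remotedata):
    # After the replacement mapping, no filename should still contain 'gaas'.
    assert all('gaas' not in name for name in fixture_remotedata.listdir())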
Example #11
    def test_get_or_create(self):
        """Testing the logic or get_or_create"""
        name = "Sr_00.usp"
        with SandboxFolder() as f:
            fp = io.StringIO(u"foo bla 42")
            f.create_file_from_filelike(fp, name, mode='w')
            fpath = os.path.join(f.abspath, name)
            node1, create = self.usp.UspData.get_or_create(fpath)

            self.assertTrue(create)
            self.assertEqual(node1.element, "Sr")

            node2 = self.usp.UspData(file=fpath)
            node2.store()

            # Now having two files - should raise an exception
            with self.assertRaises(ValueError):
                node3, create = self.usp.UspData.get_or_create(fpath,
                                                               use_first=False)

            # This should work now
            node4, create = self.usp.UspData.get_or_create(fpath,
                                                           use_first=True)
            self.assertFalse(create)
            self.assertIn(node4.pk, (node1.pk, node2.pk))
Example #12
def get_json_files(archive, silent=True, filepath=None, external_module=None):
    """Get metadata.json and data.json from an exported AiiDA archive

    :param archive: the relative filename of the archive
    :param silent: whether or not the extraction should be silent
    :param filepath: path of the directory where the archive can be found (leading '/'s are irrelevant)
    :param external_module: name of the external module where the archive can be found
    """
    # Get archive
    dirpath_archive = get_archive_file(archive, filepath=filepath, external_module=external_module)

    # Unpack archive
    with SandboxFolder(sandbox_in_repo=False) as folder:
        if zipfile.is_zipfile(dirpath_archive):
            extract_zip(dirpath_archive, folder, silent=silent)
        elif tarfile.is_tarfile(dirpath_archive):
            extract_tar(dirpath_archive, folder, silent=silent)
        else:
            raise ValueError('invalid file format, expected either a zip archive or gzipped tarball')

        try:
            with io.open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                data = json.load(fhandle)
            with io.open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle:
                metadata = json.load(fhandle)
        except IOError:
            raise NotExistent('export archive is missing one of the required files: data.json, metadata.json')

    # Return metadata.json and data.json
    return metadata, data
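
A hedged usage sketch; the archive name and filepath are illustrative, not guaranteed to exist.

# Hedged usage sketch; the archive name and filepath are illustrative.
metadata, data = get_json_files('export_v0.4_simple.aiida', filepath='export/migrate')
print(metadata['export_version'])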
Example #13
    def test_prepare(self):
        calc, inp = self._get_calc()
        with SandboxFolder() as sf:
            ci = calc._prepare_for_submission(sf, inp)
            il = sf.get_content_list()
        self.assertEqual(set(il), {'wannier90.win', 'test1', 'test2'})
        self.assertIn(['wannier90*', '.', 0], ci.retrieve_list)
Example #14
    def test_2(self):
        stdout_messages = ["data_test _cell_length_a 10(1)"]
        stderr_messages = ["first line", "last line"]

        f = SandboxFolder()
        stdout_file = "{}/{}".format(f.abspath, "aiida.out")
        stderr_file = "{}/{}".format(f.abspath, "aiida.err")

        with open(stdout_file, 'w') as of:
            of.write("\n".join(stdout_messages))
            of.flush()

        with open(stderr_file, 'w') as ef:
            ef.write("\n".join(stderr_messages))
            ef.flush()

        parser = CiffilterParser(CiffilterCalculation())
        success, nodes = parser._get_output_nodes(stdout_file, stderr_file)

        self.assertEqual(success, True)
        self.assertEqual(len(nodes), 2)
        self.assertEqual(nodes[0][0], 'cif')
        self.assertTrue(isinstance(nodes[0][1], CifData))
        self.assertEqual(nodes[0][1].generate_md5(),
                         'b5bb739a254514961a157503daf715eb')
        self.assertEqual(nodes[1][0], 'messages')
        self.assertTrue(isinstance(nodes[1][1], ParameterData))
Example #15
    def test_3(self):
        stdout_messages = ["NOTE, symmetry operator '-x,-y,-z' is missing"]
        stderr_messages = [
            "ERROR, tag '_refine_ls_shift/esd_max' value '0.25' is > 0.2."
        ]

        f = SandboxFolder()
        stdout_file = "{}/{}".format(f.abspath, "aiida.out")
        stderr_file = "{}/{}".format(f.abspath, "aiida.err")

        with open(stdout_file, 'w') as of:
            of.write("aiida.in: OK\n")
            of.write("\n".join(stdout_messages))
            of.flush()

        with open(stderr_file, 'w') as ef:
            ef.write("\n".join(stderr_messages))
            ef.flush()

        parser = CifcodcheckParser(CifcodcheckCalculation())
        success, nodes = parser._get_output_nodes(stdout_file, stderr_file)

        self.assertEqual(success, True)
        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0][0], 'messages')
        self.assertTrue(isinstance(nodes[0][1], ParameterData))
        self.assertEqual(nodes[0][1].get_dict()['output_messages'],
                         stdout_messages + stderr_messages)
Example #16
    def test_folder(self):
        # create directories for the Folder test
        with SandboxFolder(sandbox_in_repo=False) as folder:
            folder.open('file1', 'a').close()
            fhandle = folder.open('file2', 'w')
            fhandle.write('hello there!\n')
            fhandle.close()

            folder_hash = make_hash(folder)
            self.assertEqual(folder_hash, '47d9cdb2247e75eca492035f60f09fdd0daf87bbba40bb658d2d7e84f21f26c5')

            nested_obj = ['1.0.0a2', {'array|a': [1001]}, folder, None]
            self.assertEqual(make_hash(nested_obj), 'd3e7ff24708bc60b75a01571454ac0a664fa94ff2145848b584fb9ecc7e4fcbe')

            with folder.open('file3.npy', 'wb') as fhandle:
                np.save(fhandle, np.arange(10))

            # after adding a file, the folder hash should have changed
            self.assertNotEqual(make_hash(folder), folder_hash)
            # ... unless we explicitly tell it to ignore the new file
            self.assertEqual(make_hash(folder, ignored_folder_content='file3.npy'), folder_hash)

            subfolder = folder.get_subfolder('some_subdir', create=True)

            with subfolder.open('file4.npy', 'wb') as fhandle:
                np.save(fhandle, np.arange(5))

            self.assertNotEqual(make_hash(folder), folder_hash)
            self.assertEqual(make_hash(folder, ignored_folder_content=['file3.npy', 'some_subdir']), folder_hash)
Example #17
    def test_perl_error_detection(self):
        from aiida_codtools.parsers.cifcellcontents import CifcellcontentsParser
        from aiida.common.exceptions import PluginInternalError

        stdout = "4000000	C26 H26 Fe\n"

        stderr_1 = "Can't locate CIFSymmetryGenerator.pm in @INC (@INC contains: .) at cif_molecule line 61."
        stderr_2 = "BEGIN failed--compilation aborted at cif_molecule line 61."

        f = SandboxFolder()

        stdout_file = "{}/{}".format(f.abspath, "aiida.out")
        stderr_1_file = "{}/{}".format(f.abspath, "aiida_1.err")
        stderr_2_file = "{}/{}".format(f.abspath, "aiida_2.err")

        with open(stdout_file, 'w') as of:
            of.write(stdout)
            of.flush()
        with open(stderr_1_file, 'w') as ef:
            ef.write(stderr_1)
            ef.flush()
        with open(stderr_2_file, 'w') as ef:
            ef.write(stderr_2)
            ef.flush()

        parser = CifcellcontentsParser(CifcellcontentsCalculation())
        with self.assertRaises(PluginInternalError):
            parser._get_output_nodes(stdout_file, stderr_1_file)
        with self.assertRaises(PluginInternalError):
            parser._get_output_nodes(stdout_file, stderr_2_file)
Example #18
def test_export_tree():
    """Check `what` in export_tree()"""
    from aiida.common.folders import SandboxFolder

    what = []

    with SandboxFolder() as folder:
        with pytest.warns(
                AiidaDeprecationWarning,
                match='`what` is deprecated, please use `entities` instead'):
            dbexport.export_tree(what=what, folder=folder)

        folder.erase(create_empty_folder=True)
        with pytest.warns(
                AiidaDeprecationWarning,
                match=
                '`what` is deprecated, the supplied `entities` input will be used'
        ):
            dbexport.export_tree(entities=what, what=what, folder=folder)

        folder.erase(create_empty_folder=True)
        with pytest.raises(TypeError, match='`entities` must be specified'):
            dbexport.export_tree(folder=folder)

        folder.erase(create_empty_folder=True)
        with pytest.raises(TypeError, match='`folder` must be specified'):
            dbexport.export_tree(entities=what)
Example #19
def test_export_tree():
    """Check `what` in export_tree()"""
    from aiida.common.folders import SandboxFolder

    with warnings.catch_warnings():  # To avoid printing them in output (pytest.mark.filterwarnings does not work)
        warnings.filterwarnings('ignore', category=AiidaDeprecationWarning)

        what = []

        with SandboxFolder() as folder:
            with pytest.warns(AiidaDeprecationWarning, match='`what` is deprecated, please use `entities` instead'):
                dbexport.export_tree(what=what, folder=folder)

            folder.erase(create_empty_folder=True)
            with pytest.warns(
                AiidaDeprecationWarning, match='`what` is deprecated, the supplied `entities` input will be used'
            ):
                dbexport.export_tree(entities=what, what=what, folder=folder)

            folder.erase(create_empty_folder=True)
            with pytest.raises(TypeError, match='`entities` must be specified'):
                dbexport.export_tree(folder=folder)

            folder.erase(create_empty_folder=True)
            with pytest.raises(TypeError, match='`folder` must be specified'):
                dbexport.export_tree(entities=what)
Example #20
def test_submit(new_database, new_workdir):
    """Test submitting a calculation"""
    from aiida.orm.data.singlefile import SinglefileData
    from aiida.common.folders import SandboxFolder

    code = get_basic_code(new_workdir)

    # Prepare input parameters
    infile = SinglefileData(
        file=os.path.join(TEST_DIR, "input_files",
                          'mgo_sto3g_scf.crystal.d12'))

    # set up calculation
    calc = code.new_calc()
    # calc.label = "aiida_crystal17 test"
    # calc.description = "Test job submission with the aiida_crystal17 plugin"
    # calc.set_max_wallclock_seconds(30)
    calc.set_withmpi(False)
    calc.set_resources({"num_machines": 1, "num_mpiprocs_per_machine": 1})

    calc.use_input_file(infile)

    calc.store_all()

    # output input files and scripts to temporary folder
    with SandboxFolder() as folder:
        subfolder, script_filename = calc.submit_test(folder=folder)
        print("inputs created successfully at {}".format(subfolder.abspath))
Example #21
    def run(self):
        import plumpy
        from aiida.engine.processes.calcjobs.tasks import RETRIEVE_COMMAND
        from aiida.common.folders import SandboxFolder

        _ = super(VaspImmigrant, self).run()

        # Make sure the retrieve list is set (done in presubmit so we need to call that also)
        with SandboxFolder() as folder:
            self.presubmit(folder)

        settings = self.inputs.get('settings', None)
        settings = settings.get_dict() if settings else {}
        remote_path = settings.get('import_from_path', None)
        if not remote_path:
            raise InputValidationError(
                'immigrant calculations need an input "settings" containing a key "import_from_path"!'
            )
        self.node.set_remote_workdir(remote_path)  # pylint: disable=protected-access
        remotedata = get_data_node('remote',
                                   computer=self.node.computer,
                                   remote_path=remote_path)
        remotedata.add_incoming(self.node,
                                link_type=LinkType.CREATE,
                                link_label='remote_folder')
        remotedata.store()

        return plumpy.Wait(msg='Waiting to retrieve', data=RETRIEVE_COMMAND)
Example #22
def export_tar(entities=None, filename=None, **kwargs):
    """Export the entries passed in the 'entities' list to a gzipped tar file.

    .. deprecated:: 1.2.1
        Support for the parameters `what` and `outfile` will be removed in `v2.0.0`.
        Please use `entities` and `filename` instead, respectively.

    :param entities: a list of entity instances; they can belong to different models/entities.
    :type entities: list

    :param filename: the filename (possibly including the absolute path) of the file on which to export.
    :type filename: str
    """
    # Backwards-compatibility
    entities = deprecated_parameters(
        old={
            'name': 'what',
            'value': kwargs.pop('what', None)
        },
        new={
            'name': 'entities',
            'value': entities
        },
    )
    filename = deprecated_parameters(
        old={
            'name': 'outfile',
            'value': kwargs.pop('outfile', None)
        },
        new={
            'name': 'filename',
            'value': filename
        },
    )

    type_check(
        entities, (list, tuple, set),
        msg='`entities` must be specified and given as a list of AiiDA entities'
    )
    entities = list(entities)

    if type_check(filename, str, allow_none=True) is None:
        filename = 'export_data.aiida'

    with SandboxFolder() as folder:
        time_export_start = time.time()
        export_tree(entities=entities, folder=folder, **kwargs)
        time_export_end = time.time()

        with tarfile.open(filename,
                          'w:gz',
                          format=tarfile.PAX_FORMAT,
                          dereference=True) as tar:
            time_compress_start = time.time()
            tar.add(folder.abspath, arcname='')
            time_compress_end = time.time()

    return (time_export_start, time_export_end, time_compress_start,
            time_compress_end)
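
A hedged usage sketch for export_tar(), storing a small node and reporting the four timestamps it returns:

# Hedged usage sketch: export one stored node and report the returned timings.
from aiida import orm

node = orm.Dict(dict={'answer': 42}).store()
t0, t1, c0, c1 = export_tar([node], filename='export_data.aiida')
print('export took {:.2f}s, compression took {:.2f}s'.format(t1 - t0, c1 - c0))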
Example #23
    def test_missing_node_repo_folder_import(self, temp_dir):
        """
        Make sure `~aiida.tools.importexport.common.exceptions.CorruptArchive` is raised during import when the
        Node repository folder is missing.
        Create and export a Node and manually remove its repository folder in the export file.
        Attempt to import it and make sure `~aiida.tools.importexport.common.exceptions.CorruptArchive` is raised,
        due to the missing folder.
        """
        import tarfile

        from aiida.common.folders import SandboxFolder
        from aiida.tools.importexport.common.archive import extract_tar
        from aiida.tools.importexport.common.config import NODES_EXPORT_SUBFOLDER
        from aiida.tools.importexport.common.utils import export_shard_uuid

        node = orm.CalculationNode().store()
        node.seal()
        node_uuid = node.uuid

        node_repo = RepositoryFolder(section=Repository._section_name, uuid=node_uuid)  # pylint: disable=protected-access
        self.assertTrue(
            node_repo.exists(), msg='Newly created and stored Node should have had an existing repository folder'
        )

        # Export and reset db
        filename = os.path.join(temp_dir, 'export.aiida')
        export([node], filename=filename, file_format='tar.gz', silent=True)
        self.reset_database()

        # Untar export file, remove repository folder, re-tar
        node_shard_uuid = export_shard_uuid(node_uuid)
        node_top_folder = node_shard_uuid.split('/')[0]
        with SandboxFolder() as folder:
            extract_tar(filename, folder, silent=True, nodes_export_subfolder=NODES_EXPORT_SUBFOLDER)
            node_folder = folder.get_subfolder(os.path.join(NODES_EXPORT_SUBFOLDER, node_shard_uuid))
            self.assertTrue(
                node_folder.exists(), msg="The Node's repository folder should still exist in the export file"
            )

            # Removing the Node's repository folder from the export file
            shutil.rmtree(
                folder.get_subfolder(os.path.join(NODES_EXPORT_SUBFOLDER, node_top_folder)).abspath, ignore_errors=True
            )
            self.assertFalse(
                node_folder.exists(),
                msg="The Node's repository folder should now have been removed in the export file"
            )

            filename_corrupt = os.path.join(temp_dir, 'export_corrupt.aiida')
            with tarfile.open(filename_corrupt, 'w:gz', format=tarfile.PAX_FORMAT, dereference=True) as tar:
                tar.add(folder.abspath, arcname='')

        # Try to import, check it raises and check the raise message
        with self.assertRaises(exceptions.CorruptArchive) as exc:
            import_data(filename_corrupt, silent=True)

        self.assertIn(
            'Unable to find the repository folder for Node with UUID={}'.format(node_uuid), str(exc.exception)
        )
Example #24
def aiida_sandbox():
    """
    Create and yield an AiiDA sandbox folder (used, for instance, as the folder
    argument of the prepare_for_submission() calculator methods).
    """
    from aiida.common.folders import SandboxFolder
    with SandboxFolder() as sandbox:
        yield sandbox
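
A sketch of a test receiving this fixture, assuming it is registered with @pytest.fixture:

# Hedged usage sketch; assumes the fixture is registered via @pytest.fixture.
def test_sandbox_starts_empty(aiida_sandbox):
    # The sandbox is created empty and removed again after the test.
    assert aiida_sandbox.get_content_list() == []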
Example #25
    def test_prepare(self):
        """Check that preparing creates all necessary files"""
        calc, inp = self._get_calc()
        with SandboxFolder() as sandbox_f:
            calc_info = calc._prepare_for_submission(sandbox_f, inp)
            inputs = sandbox_f.get_content_list()
        self.assertEqual(set(inputs), {'wannier90.win', 'test1', 'test2'})
        self.assertIn(['wannier90*', '.', 0], calc_info.retrieve_list)
Example #26
    def test_prepare(self):
        """Check that preparing writes all necessary files"""
        calc, inp = self._get_calc()
        with SandboxFolder() as sandbox_f:
            calc_info = calc._prepare_for_submission(sandbox_f, inp)
        self.assertIn('CHGCAR', calc_info.retrieve_list)
        self.assertIn('WAVECAR', calc_info.retrieve_list)
        self.assertIn('IBZKPT', calc_info.retrieve_list)
Example #27
    def test_migrate_v3_to_v4(self):
        """Test function migrate_v3_to_v4"""
        from aiida import get_version

        # Get metadata.json and data.json as dicts from v0.4 file archive
        metadata_v4, data_v4 = get_json_files('export_v0.4_simple.aiida', **self.core_archive)
        verify_metadata_version(metadata_v4, version='0.4')

        # Get metadata.json and data.json as dicts from v0.3 file archive
        # Cannot use 'get_json_files' for 'export_v0.3_simple.aiida',
        # because we need to pass the SandboxFolder to 'migrate_v3_to_v4'
        dirpath_archive = get_archive_file('export_v0.3_simple.aiida', **self.core_archive)

        with SandboxFolder(sandbox_in_repo=False) as folder:
            if zipfile.is_zipfile(dirpath_archive):
                extract_zip(dirpath_archive, folder, silent=True)
            elif tarfile.is_tarfile(dirpath_archive):
                extract_tar(dirpath_archive, folder, silent=True)
            else:
                raise ValueError('invalid file format, expected either a zip archive or gzipped tarball')

            try:
                with io.open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle:
                    data_v3 = jsonload(fhandle)
                with io.open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle:
                    metadata_v3 = jsonload(fhandle)
            except IOError:
                raise NotExistent('export archive is missing one of the required files: data.json, metadata.json')

            verify_metadata_version(metadata_v3, version='0.3')

            # Migrate to v0.4
            migrate_v3_to_v4(metadata_v3, data_v3, folder)
            verify_metadata_version(metadata_v3, version='0.4')

        # Remove the AiiDA version, since this may change regardless of the migration function
        metadata_v3.pop('aiida_version')
        metadata_v4.pop('aiida_version')

        # Assert conversion message in `metadata.json` is correct and then remove it for later assertions
        self.maxDiff = None  # pylint: disable=invalid-name
        conversion_message = 'Converted from version 0.3 to 0.4 with AiiDA v{}'.format(get_version())
        self.assertEqual(
            metadata_v3.pop('conversion_info')[-1],
            conversion_message,
            msg='The conversion message after migration is wrong'
        )
        metadata_v4.pop('conversion_info')

        # Assert changes were performed correctly
        self.assertDictEqual(
            metadata_v3,
            metadata_v4,
            msg='After migration, metadata.json should equal intended metadata.json from archives'
        )
        self.assertDictEqual(
            data_v3, data_v4, msg='After migration, data.json should equal intended data.json from archives'
        )
Example #28
    def _get_temp_folder(self):
        """Return the temporary sandbox folder.

        :return: a SandboxFolder object mapping the node in the repository.
        """
        if self._temp_folder is None:
            self._temp_folder = SandboxFolder()

        return self._temp_folder
Example #29
def test_prepare_for_submission(crystal_calc):
    from aiida.common.folders import SandboxFolder
    # crystal_calc.store_all()
    with SandboxFolder() as folder:
        calcinfo = crystal_calc.prepare_for_submission(folder=folder)
        # with folder.open('INPUT') as f:
        #     print(f.readlines())
    assert crystal_calc._GEOMETRY_FILE_NAME in calcinfo['retrieve_list']
    assert crystal_calc._OUTPUT_FILE_NAME in calcinfo['retrieve_list']
Example #30
def migrate_archive(input_file, output_file, silent=True):
    """Migrate contents using `migrate_recursively`
    This is essentially similar to `verdi export migrate`.
    However, since this command may be disabled, this function simulates it and keeps the tests working.

    :param input_file: filename with full path for archive to be migrated
    :param output_file: filename with full path for archive to be created after migration
    """
    from aiida.tools.importexport.migration import migrate_recursively

    # Unpack archive, migrate, and re-pack archive
    with SandboxFolder(sandbox_in_repo=False) as folder:
        if zipfile.is_zipfile(input_file):
            extract_zip(input_file, folder, silent=silent)
        elif tarfile.is_tarfile(input_file):
            extract_tar(input_file, folder, silent=silent)
        else:
            raise ValueError(
                'invalid file format, expected either a zip archive or gzipped tarball'
            )

        try:
            with open(folder.get_abs_path('data.json'), 'r',
                      encoding='utf8') as fhandle:
                data = json.load(fhandle)
            with open(folder.get_abs_path('metadata.json'),
                      'r',
                      encoding='utf8') as fhandle:
                metadata = json.load(fhandle)
        except IOError:
            raise NotExistent(
                'export archive is missing one of the required files: '
                'data.json, metadata.json')

        # Migrate
        migrate_recursively(metadata, data, folder)

        # Write the migrated json files back (text mode: json.dump writes str)
        with open(folder.get_abs_path('data.json'), 'w', encoding='utf8') as fhandle:
            json.dump(data, fhandle, indent=4)

        with open(folder.get_abs_path('metadata.json'), 'w', encoding='utf8') as fhandle:
            json.dump(metadata, fhandle, indent=4)

        # Pack archive
        compression = zipfile.ZIP_DEFLATED
        with zipfile.ZipFile(output_file,
                             mode='w',
                             compression=compression,
                             allowZip64=True) as archive:
            src = folder.abspath
            for dirpath, dirnames, filenames in os.walk(src):
                relpath = os.path.relpath(dirpath, src)
                for filename in dirnames + filenames:
                    real_src = os.path.join(dirpath, filename)
                    real_dest = os.path.join(relpath, filename)
                    archive.write(real_src, real_dest)
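
A hedged usage sketch for migrate_archive(); both paths are hypothetical placeholders.

# Hedged usage sketch; input and output paths are hypothetical.
migrate_archive('/tmp/export_old.aiida', '/tmp/export_migrated.aiida')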