Example #1
class TestVerdiDataTrajectory(AiidaTestCase, DummyVerdiDataListable,
                              DummyVerdiDataExportable):
    """Test verdi data trajectory."""
    @staticmethod
    def create_trajectory_data():
        """Create TrajectoryData object with two arrays."""

        traj = TrajectoryData()

        # Create sample data
        stepids = np.array([60, 70])
        times = stepids * 0.01
        cells = np.array([
            [[2., 0., 0.], [0., 2., 0.], [0., 0., 2.]],
            [[3., 0., 0.], [0., 3., 0.], [0., 0., 3.]],
        ])
        symbols = ['H', 'O', 'C']
        positions = np.array([
            [[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]],
            [[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]],
        ])
        velocities = np.array([
            [[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]],
            [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [-0.5, -0.5, -0.5]],
        ])

        # Set the trajectory arrays on the node
        traj.set_trajectory(stepids=stepids,
                            cells=cells,
                            symbols=symbols,
                            positions=positions,
                            times=times,
                            velocities=velocities)

        traj.store()

        # Create 2 groups and add the data to one of them
        g_ne = Group(label='non_empty_group')
        g_ne.store()
        g_ne.add_nodes(traj)

        g_e = Group(label='empty_group')
        g_e.store()

        return {
            DummyVerdiDataListable.NODE_ID_STR: traj.id,
            DummyVerdiDataListable.NON_EMPTY_GROUP_ID_STR: g_ne.id,
            DummyVerdiDataListable.EMPTY_GROUP_ID_STR: g_e.id
        }

    @classmethod
    def setUpClass(cls):  # pylint: disable=arguments-differ
        super().setUpClass()
        orm.Computer(name='comp',
                     hostname='localhost',
                     transport_type='local',
                     scheduler_type='direct',
                     workdir='/tmp/aiida').store()
        cls.ids = cls.create_trajectory_data()

    def setUp(self):
        self.comp = self.computer
        self.runner = CliRunner()
        self.this_folder = os.path.dirname(__file__)
        self.this_file = os.path.basename(__file__)

        self.cli_runner = CliRunner()

    def test_showhelp(self):
        res = self.runner.invoke(cmd_trajectory.trajectory_show, ['--help'])
        self.assertIn(
            b'Usage:', res.stdout_bytes,
            'The string "Usage: " was not found in the output'
            ' of verdi data trajectory show --help')

    def test_list(self):
        self.data_listing_test(
            TrajectoryData, str(self.ids[DummyVerdiDataListable.NODE_ID_STR]),
            self.ids)

    @unittest.skipUnless(has_pycifrw(), 'Unable to import PyCifRW')
    def test_export(self):
        new_supported_formats = list(cmd_trajectory.EXPORT_FORMATS)
        self.data_export_test(TrajectoryData, self.ids, new_supported_formats)
Example #2
class DummyVerdiDataExportable:
    """Test exportable data objects."""

    NODE_ID_STR = 'node_id'
    EMPTY_GROUP_ID_STR = 'empty_group_id'
    EMPTY_GROUP_NAME_STR = 'empty_group'
    NON_EMPTY_GROUP_ID_STR = 'non_empty_group_id'
    NON_EMPTY_GROUP_NAME_STR = 'non_empty_group'

    @unittest.skipUnless(has_pycifrw(), 'Unable to import PyCifRW')
    def data_export_test(self, datatype, ids, supported_formats):
        """This method tests that the data listing works as expected with all
        possible flags and arguments for different datatypes."""
        datatype_mapping = {
            CifData: cmd_cif.cif_export,
            StructureData: cmd_structure.structure_export,
            TrajectoryData: cmd_trajectory.trajectory_export,
        }

        if datatype is None or datatype not in datatype_mapping:
            raise Exception(
                'The export of objects of type {} is not supported'.format(
                    datatype))

        export_cmd = datatype_mapping[datatype]

        # Check that the simple command works as expected
        options = [str(ids[self.NODE_ID_STR])]
        res = self.cli_runner.invoke(export_cmd,
                                     options,
                                     catch_exceptions=False)
        self.assertEqual(res.exit_code, 0,
                         'The command did not finish correctly')

        for flag in ['-F', '--format']:
            for frmt in supported_formats:
                options = [flag, frmt, str(ids[self.NODE_ID_STR])]
                res = self.cli_runner.invoke(export_cmd,
                                             options,
                                             catch_exceptions=False)
                self.assertEqual(
                    res.exit_code, 0, 'The command did not finish '
                    'correctly. Output:\n{}'.format(res.output))

        # Check that the output to file flags work correctly:
        # -o, --output
        output_flags = ['-o', '--output']
        for flag in output_flags:
            tmpd = tempfile.mkdtemp()
            try:
                filepath = os.path.join(tmpd, 'output_file.txt')
                options = [flag, filepath, str(ids[self.NODE_ID_STR])]
                res = self.cli_runner.invoke(export_cmd,
                                             options,
                                             catch_exceptions=False)
                self.assertEqual(
                    res.exit_code, 0, 'The command should finish correctly.'
                    ' Output:\n{}'.format(res.output))

                # Try to export it again. It should fail because the
                # file exists
                res = self.cli_runner.invoke(export_cmd,
                                             options,
                                             catch_exceptions=False)
                self.assertNotEqual(
                    res.exit_code, 0,
                    'The command should fail because the file already exists')

                # Now we force the export of the file and it should overwrite
                # existing files
                options = [flag, filepath, '-f', str(ids[self.NODE_ID_STR])]
                res = self.cli_runner.invoke(export_cmd,
                                             options,
                                             catch_exceptions=False)
                self.assertEqual(
                    res.exit_code, 0, 'The command should finish correctly.'
                    ' Output: {}'.format(res.output))
            finally:
                shutil.rmtree(tmpd)
Example #3
class TestTcodDbExporter(AiidaTestCase):
    """Tests for TcodDbExporter class."""
    from aiida.orm.nodes.data.structure import has_ase, has_spglib
    from aiida.orm.nodes.data.cif import has_pycifrw

    def test_contents_encoding_1(self):
        """
        Testing the logic of choosing the encoding and the process of
        encoding contents.
        """
        from aiida.tools.dbexporters.tcod import cif_encode_contents
        self.assertEquals(cif_encode_contents(b'simple line')[1], None)
        self.assertEquals(cif_encode_contents(b' ;\n ;')[1], None)
        self.assertEquals(cif_encode_contents(b';\n'),
                          (b'=3B\n', 'quoted-printable'))
        self.assertEquals(cif_encode_contents(b'line\n;line'),
                          (b'line\n=3Bline', 'quoted-printable'))
        self.assertEquals(cif_encode_contents(b'tabbed\ttext'),
                          (b'tabbed=09text', 'quoted-printable'))

        # Angstrom symbol 'Å' will be encoded as two bytes, thus encoding it
        # for CIF will produce two quoted-printable entities, '=C3' and '=85',
        # one for each byte.
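        # For reference: u'Å'.encode('utf-8') gives the two bytes b'\xc3\x85',
        # and each byte gets its own quoted-printable escape, '=C3' and '=85'.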

        self.assertEquals(cif_encode_contents(u'angstrom Å'.encode('utf-8')),
                          (b'angstrom =C3=85', 'quoted-printable'))
        self.assertEquals(cif_encode_contents(b'.'),
                          (b'=2E', 'quoted-printable'))
        self.assertEquals(cif_encode_contents(b'?'),
                          (b'=3F', 'quoted-printable'))
        self.assertEquals(cif_encode_contents(b'.?'), (b'.?', None))
        # This one is particularly tricky: a long line is folded by the QP
        # and the semicolon sign becomes the first character on a new line.
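        # (Quoted-printable limits encoded lines to 76 characters and marks the
        # soft break with a trailing '=', hence the '=\n' in the expected value.)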
        self.assertEquals(
            cif_encode_contents(u"Å{};a".format("".join(
                "a" for i in range(0, 69))).encode('utf-8')),
            (b'=C3=85aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
             b'aaaaaaaaaaaaaaaaaaaaaaaaaaaaa=\n=3Ba', 'quoted-printable'))
        self.assertEquals(cif_encode_contents(u'angstrom ÅÅÅ'.encode('utf-8')),
                          (b'YW5nc3Ryb20gw4XDhcOF', 'base64'))
        self.assertEquals(
            cif_encode_contents("".join(
                "a" for i in range(0, 2048)).encode('utf-8'))[1], None)
        self.assertEquals(
            cif_encode_contents("".join(
                "a" for i in range(0, 2049)).encode('utf-8'))[1],
            'quoted-printable')
        self.assertEquals(cif_encode_contents(b'datatest')[1], None)
        self.assertEquals(cif_encode_contents(b'data_test')[1], 'base64')

    def test_collect_files(self):
        """Testing the collection of files from file tree."""
        from aiida.tools.dbexporters.tcod import _collect_files
        from aiida.common.folders import SandboxFolder
        from six.moves import StringIO

        sf = SandboxFolder()
        sf.get_subfolder('out', create=True)
        sf.get_subfolder('pseudo', create=True)
        sf.get_subfolder('save', create=True)
        sf.get_subfolder('save/1', create=True)
        sf.get_subfolder('save/2', create=True)

        f = StringIO(u"test")
        sf.create_file_from_filelike(f, 'aiida.in', mode='w')
        f = StringIO(u"test")
        sf.create_file_from_filelike(f, 'aiida.out', mode='w')
        f = StringIO(u"test")
        sf.create_file_from_filelike(f, '_aiidasubmit.sh', mode='w')
        f = StringIO(u"test")
        sf.create_file_from_filelike(f, '_.out', mode='w')
        f = StringIO(u"test")
        sf.create_file_from_filelike(f, 'out/out', mode='w')
        f = StringIO(u"test")
        sf.create_file_from_filelike(f, 'save/1/log.log', mode='w')

        md5 = '098f6bcd4621d373cade4e832627b4f6'
        sha1 = 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3'
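        # These are the md5 and sha1 hex digests of b'test', the contents
        # written to every file above.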
        self.assertEquals(_collect_files(sf.abspath), [{
            'name': '_.out',
            'contents': b'test',
            'md5': md5,
            'sha1': sha1,
            'type': 'file'
        }, {
            'name': '_aiidasubmit.sh',
            'contents': b'test',
            'md5': md5,
            'sha1': sha1,
            'type': 'file'
        }, {
            'name': 'aiida.in',
            'contents': b'test',
            'md5': md5,
            'sha1': sha1,
            'type': 'file'
        }, {
            'name': 'aiida.out',
            'contents': b'test',
            'md5': md5,
            'sha1': sha1,
            'type': 'file'
        }, {
            'name': 'out/',
            'type': 'folder'
        }, {
            'name': 'out/out',
            'contents': b'test',
            'md5': md5,
            'sha1': sha1,
            'type': 'file'
        }, {
            'name': 'pseudo/',
            'type': 'folder'
        }, {
            'name': 'save/',
            'type': 'folder'
        }, {
            'name': 'save/1/',
            'type': 'folder'
        }, {
            'name': 'save/1/log.log',
            'contents': b'test',
            'md5': md5,
            'sha1': sha1,
            'type': 'file'
        }, {
            'name': 'save/2/',
            'type': 'folder'
        }])

    @unittest.skipIf(not has_ase(), "Unable to import ase")
    @unittest.skipIf(not has_spglib(), "Unable to import spglib")
    @unittest.skipIf(not has_pycifrw(), "Unable to import PyCifRW")
    def test_cif_structure_roundtrip(self):
        from aiida.tools.dbexporters.tcod import export_cif, export_values
        from aiida.common.folders import SandboxFolder
        import tempfile

        with tempfile.NamedTemporaryFile(mode='w+') as tmpf:
            tmpf.write('''
                data_test
                _cell_length_a    10
                _cell_length_b    10
                _cell_length_c    10
                _cell_angle_alpha 90
                _cell_angle_beta  90
                _cell_angle_gamma 90
                loop_
                _atom_site_label
                _atom_site_fract_x
                _atom_site_fract_y
                _atom_site_fract_z
                C 0 0 0
                O 0.5 0.5 0.5
            ''')
            tmpf.flush()
            a = orm.CifData(filepath=tmpf.name)

        c = a.get_structure()
        c.store()
        pd = orm.Dict()

        code = orm.Code(local_executable='test.sh')
        with tempfile.NamedTemporaryFile(mode='w+') as tmpf:
            tmpf.write("#/bin/bash\n\necho test run\n")
            tmpf.flush()
            code.put_object_from_filelike(tmpf, 'test.sh')

        code.store()

        calc = orm.CalcJobNode(computer=self.computer)
        calc.set_option('resources', {
            'num_machines': 1,
            'num_mpiprocs_per_machine': 1
        })
        calc.add_incoming(code, LinkType.INPUT_CALC, "code")
        calc.set_option('environment_variables', {
            'PATH': '/dev/null',
            'USER': 'unknown'
        })

        with tempfile.NamedTemporaryFile(mode='w+', prefix="Fe") as tmpf:
            tmpf.write("<UPF version=\"2.0.1\">\nelement=\"Fe\"\n")
            tmpf.flush()
            upf = orm.UpfData(filepath=tmpf.name)
            upf.store()
            calc.add_incoming(upf, LinkType.INPUT_CALC, "upf")

        with tempfile.NamedTemporaryFile(mode='w+') as tmpf:
            tmpf.write("data_test")
            tmpf.flush()
            cif = orm.CifData(filepath=tmpf.name)
            cif.store()
            calc.add_incoming(cif, LinkType.INPUT_CALC, "cif")

        with SandboxFolder() as fhandle:
            calc.put_object_from_tree(fhandle.abspath)
        calc.store()

        fd = orm.FolderData()
        with fd.open('_scheduler-stdout.txt', 'w') as fhandle:
            fhandle.write(u"standard output")

        with fd.open('_scheduler-stderr.txt', 'w') as fhandle:
            fhandle.write(u"standard error")

        fd.store()
        fd.add_incoming(calc, LinkType.CREATE, calc.link_label_retrieved)

        pd.add_incoming(calc, LinkType.CREATE, "create1")
        pd.store()

        with self.assertRaises(ValueError):
            export_cif(c, parameters=pd)

        c.add_incoming(calc, LinkType.CREATE, "create2")
        export_cif(c, parameters=pd)

        values = export_values(c, parameters=pd)
        values = values['0']

        self.assertEquals(values['_tcod_computation_environment'],
                          ['PATH=/dev/null\nUSER=unknown'])
        self.assertEquals(values['_tcod_computation_command'],
                          ['cd 1; ./_aiidasubmit.sh'])

    @unittest.skipIf(not has_ase(), "Unable to import ase")
    @unittest.skipIf(not has_spglib(), "Unable to import spglib")
    @unittest.skipIf(not has_pycifrw(), "Unable to import PyCifRW")
    def test_inline_export(self):
        from aiida.tools.dbexporters.tcod import export_values
        import tempfile

        with tempfile.NamedTemporaryFile(mode='w+') as tmpf:
            tmpf.write('''
                data_test
                _cell_length_a    10
                _cell_length_b    10
                _cell_length_c    10
                _cell_angle_alpha 90
                _cell_angle_beta  90
                _cell_angle_gamma 90
                loop_
                _atom_site_label
                _atom_site_fract_x
                _atom_site_fract_y
                _atom_site_fract_z
                C 0 0 0
                O 0.5 0.5 0.5
            ''')
            tmpf.flush()
            a = orm.CifData(filepath=tmpf.name)

        s = a.get_structure(store=True)
        val = export_values(s)
        script = val.first_block()['_tcod_file_contents'][1]
        function = '_get_aiida_structure_pymatgen_inline'
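        # find() != rfind() means the inline-function name appears more than
        # once in the exported script.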
        self.assertNotEqual(script.find(function), script.rfind(function))

    @unittest.skipIf(not has_ase(), "Unable to import ase")
    @unittest.skipIf(not has_spglib(), "Unable to import spglib")
    @unittest.skipIf(not has_pycifrw(), "Unable to import PyCifRW")
    def test_symmetry_reduction(self):
        from aiida.tools.dbexporters.tcod import export_values
        from ase import Atoms

        a = Atoms('BaTiO3', cell=(4., 4., 4.))
        a.set_scaled_positions((
            (0.0, 0.0, 0.0),
            (0.5, 0.5, 0.5),
            (0.5, 0.5, 0.0),
            (0.5, 0.0, 0.5),
            (0.0, 0.5, 0.5),
        ))

        a.set_chemical_symbols(['Ba', 'Ti', 'O', 'O', 'O'])
        val = export_values(orm.StructureData(ase=a),
                            reduce_symmetry=True,
                            store=True)['0']
        self.assertEqual(val['_atom_site_label'], ['Ba1', 'Ti1', 'O1'])
        self.assertEqual(val['_symmetry_space_group_name_H-M'], 'Pm-3m')
        self.assertEqual(val['_symmetry_space_group_name_Hall'], '-P 4 2 3')

    def test_cmdline_parameters(self):
        """
        Ensuring that neither extend_with_cmdline_parameters() nor
        deposition_cmdline_parameters() set default parameters.
        """
        from aiida.tools.dbexporters.tcod import (
            extend_with_cmdline_parameters, deposition_cmdline_parameters)
        import argparse

        parser = argparse.ArgumentParser()
        extend_with_cmdline_parameters(parser)
        options = vars(parser.parse_args(args=[]))

        options = {k: v for k, v in options.items() if v is not None}

        self.assertEqual(options, {})

        parser = argparse.ArgumentParser()
        deposition_cmdline_parameters(parser)
        options = vars(parser.parse_args(args=[]))

        options = {k: v for k, v in options.items() if v is not None}

        self.assertEqual(options, {})

    @unittest.skipIf(not has_ase(), "Unable to import ase")
    @unittest.skipIf(not has_spglib(), "Unable to import spglib")
    @unittest.skipIf(not has_pycifrw(), "Unable to import PyCifRW")
    def test_export_trajectory(self):
        from aiida.tools.dbexporters.tcod import export_values

        cells = [
            [[2., 0., 0.], [0., 2., 0.], [0., 0., 2.]],
            [[3., 0., 0.], [0., 3., 0.], [0., 0., 3.]],
        ]
        symbols = [['H', 'O', 'C'], ['H', 'O', 'C']]
        positions = [[[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]],
                     [[0., 0., 0.], [0.75, 0.75, 0.75], [1.25, 1.25, 1.25]]]
        structurelist = []
        for i in range(0, 2):
            struct = orm.StructureData(cell=cells[i])
            for j, symbol in enumerate(symbols[i]):
                struct.append_atom(symbols=symbol, position=positions[i][j])
            structurelist.append(struct)

        td = orm.TrajectoryData(structurelist=structurelist)

        with self.assertRaises(ValueError):
            # Trajectory index is not specified
            export_values(td)

        expected_tags = [
            '_atom_site_fract_x', '_atom_site_fract_y', '_atom_site_fract_z',
            '_atom_site_label', '_atom_site_type_symbol',
            '_audit_conform_dict_location', '_audit_conform_dict_name',
            '_audit_conform_dict_version', '_audit_creation_method',
            '_cell_angle_alpha', '_cell_angle_beta', '_cell_angle_gamma',
            '_cell_length_a', '_cell_length_b', '_cell_length_c',
            '_chemical_formula_sum', '_symmetry_equiv_pos_as_xyz',
            '_symmetry_int_tables_number', '_symmetry_space_group_name_h-m',
            '_symmetry_space_group_name_hall'
        ]

        tcod_file_tags = [
            '_tcod_content_encoding_id',
            '_tcod_content_encoding_layer_id',
            '_tcod_content_encoding_layer_type',
            '_tcod_file_content_encoding',
            '_tcod_file_contents',
            '_tcod_file_id',
            '_tcod_file_md5sum',
            '_tcod_file_name',
            '_tcod_file_role',
            '_tcod_file_sha1sum',
            '_tcod_file_uri',
        ]
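        # As the cases below demonstrate, the _tcod_file_* tags are only
        # emitted when the node is stored (or about to be stored) and the
        # AiiDA database dump is included in the export.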

        # Not stored and not to be stored:
        v = export_values(td, trajectory_index=1)
        self.assertEqual(sorted(v['0'].keys()), expected_tags)

        # Stored, but not expected to be stored:
        td = orm.TrajectoryData(structurelist=structurelist)
        td.store()
        v = export_values(td, trajectory_index=1)
        self.assertEqual(sorted(v['0'].keys()), expected_tags + tcod_file_tags)

        # Not stored, but expected to be stored:
        td = orm.TrajectoryData(structurelist=structurelist)
        v = export_values(td, trajectory_index=1, store=True)
        self.assertEqual(sorted(v['0'].keys()), expected_tags + tcod_file_tags)

        # Both stored and expected to be stored:
        td = orm.TrajectoryData(structurelist=structurelist)
        td.store()
        v = export_values(td, trajectory_index=1, store=True)
        self.assertEqual(sorted(v['0'].keys()), expected_tags + tcod_file_tags)

        # Stored, but asked not to include DB dump:
        td = orm.TrajectoryData(structurelist=structurelist)
        td.store()
        v = export_values(td, trajectory_index=1, dump_aiida_database=False)
        self.assertEqual(sorted(v['0'].keys()), expected_tags)

    def test_contents_encoding_2(self):
        """
        Testing the logic of choosing the encoding and the process of
        encoding contents.
        """
        from aiida.tools.dbexporters.tcod import decode_textfield

        def check_ncr(self, inp, out):
            from aiida.tools.dbexporters.tcod import (encode_textfield_ncr,
                                                      decode_textfield_ncr)
            encoded = encode_textfield_ncr(inp)
            decoded = decode_textfield_ncr(out)
            decoded_universal = decode_textfield(out, 'ncr')
            self.assertEquals(encoded, out)
            self.assertEquals(decoded, inp)
            self.assertEquals(decoded_universal, inp)

        def check_quoted_printable(self, inp, out):
            from aiida.tools.dbexporters.tcod import (
                encode_textfield_quoted_printable,
                decode_textfield_quoted_printable)
            encoded = encode_textfield_quoted_printable(inp)
            decoded = decode_textfield_quoted_printable(out)
            decoded_universal = decode_textfield(out, 'quoted-printable')
            self.assertEquals(encoded, out)
            self.assertEquals(decoded, inp)
            self.assertEquals(decoded_universal, inp)

        def check_base64(self, inp, out):
            from aiida.tools.dbexporters.tcod import (encode_textfield_base64,
                                                      decode_textfield_base64)
            encoded = encode_textfield_base64(inp)
            decoded = decode_textfield_base64(out)
            decoded_universal = decode_textfield(out, 'base64')
            self.assertEquals(encoded, out)
            self.assertEquals(decoded, inp)
            self.assertEquals(decoded_universal, inp)

        def check_gzip_base64(self, text):
            from aiida.tools.dbexporters.tcod import (
                encode_textfield_gzip_base64, decode_textfield_gzip_base64)
            encoded = encode_textfield_gzip_base64(text)
            decoded = decode_textfield_gzip_base64(encoded)
            decoded_universal = decode_textfield(encoded, 'gzip+base64')
            self.assertEquals(text, decoded)
            self.assertEquals(text, decoded_universal)

        check_ncr(self, b'.', b'&#46;')
        check_ncr(self, b'?', b'&#63;')
        check_ncr(self, b';\n', b'&#59;\n')
        check_ncr(self, b'line\n;line', b'line\n&#59;line')
        check_ncr(self, b'tabbed\ttext', b'tabbed&#9;text')
        # Angstrom symbol 'Å' will be encoded as two bytes, thus encoding it
        # for CIF will produce two NCR entities, '&#195;' and '&#133;', one for
        # each byte.
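        # (195 and 133 are the decimal values of the UTF-8 bytes 0xC3 and 0x85.)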
        check_ncr(self, u'angstrom Å'.encode('utf-8'),
                  b'angstrom &#195;&#133;')
        check_ncr(self, b'<html>&#195;&#133;</html>',
                  b'<html>&#38;#195;&#38;#133;</html>')

        check_quoted_printable(self, b'.', b'=2E')
        check_quoted_printable(self, b'?', b'=3F')
        check_quoted_printable(self, b';\n', b'=3B\n')
        check_quoted_printable(self, b'line\n;line', b'line\n=3Bline')
        check_quoted_printable(self, b'tabbed\ttext', b'tabbed=09text')
        # Angstrom symbol 'Å' will be encoded as two bytes, thus encoding it
        # for CIF will produce two quoted-printable entities, '=C3' and '=85',
        # one for each byte.
        check_quoted_printable(self, u'angstrom Å'.encode('utf-8'),
                               b'angstrom =C3=85')
        check_quoted_printable(self, b'line\rline\x00', b'line=0Dline=00')
        # This one is particularly tricky: a long line is folded by the QP
        # and the semicolon sign becomes the first character on a new line.
        check_quoted_printable(
            self,
            u"Å{};a".format("".join("a"
                                    for i in range(0, 69))).encode('utf-8'),
            b'=C3=85aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
            b'aaaaaaaaaaaaaaaaaaaaaaaaaaaaa=\n=3Ba')

        check_base64(self, u'angstrom ÅÅÅ'.encode('utf-8'),
                     b'YW5nc3Ryb20gw4XDhcOF')
        check_gzip_base64(self, u'angstrom ÅÅÅ'.encode('utf-8'))
Example #4
                'The string "Successfully imported" was not found in the output'
                ' of verdi data structure import.')
            self.assertIn(
                b'PK', res.stdout_bytes,
                'The string "PK" was not found in the output'
                ' of verdi data structure import.')

    def test_list(self):
        self.data_listing_test(StructureData, 'BaO3Ti', self.ids)

    def test_export(self):
        self.data_export_test(StructureData, self.ids,
                              cmd_structure.EXPORT_FORMATS)


@unittest.skipUnless(has_pycifrw(), 'Unable to import PyCifRW')
class TestVerdiDataCif(AiidaTestCase, DummyVerdiDataListable,
                       DummyVerdiDataExportable):
    """Test verdi data cif."""
    valid_sample_cif_str = '''
        data_test
        _cell_length_a    10
        _cell_length_b    10
        _cell_length_c    10
        _cell_angle_alpha 90
        _cell_angle_beta  90
        _cell_angle_gamma 90
        _chemical_formula_sum 'C O2'
        loop_
        _atom_site_label
        _atom_site_fract_x
class TestCodDbImporter(AiidaTestCase):
    """Test the CodDbImporter class."""
    from aiida.orm.nodes.data.cif import has_pycifrw

    def test_query_construction_1(self):
        """Test query construction."""
        from aiida.tools.dbimporters.plugins.cod import CodDbImporter
        import re

        codi = CodDbImporter()
        q_sql = codi.query_sql(
            id=['1000000', 3000000],
            element=['C', 'H', 'Cl'],
            number_of_elements=5,
            chemical_name=['caffeine', 'serotonine'],
            formula=['C6 H6'],
            volume=[100, 120.005],
            spacegroup='P -1',
            a=[10.0 / 3, 1],
            alpha=[10.0 / 6, 0],
            measurement_temp=[0, 10.5],
            measurement_pressure=[1000, 1001],
            determination_method=['single crystal', None]
        )

        # Rounding errors occur in Python 3 thus they are averted using
        # the following precision stripping regular expressions.
        q_sql = re.sub(r'(\d\.\d{6})\d+', r'\1', q_sql)
        q_sql = re.sub(r'(120.00)39+', r'\g<1>4', q_sql)
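        # e.g. the ±0.001 window around a = 10/3 yields long float reprs such
        # as 3.3323333333333... in Python 3; the first regex truncates them to
        # six decimal places so the literal comparison below stays stable.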

        self.assertEqual(q_sql,
                          'SELECT file, svnrevision FROM data WHERE '
                          "(status IS NULL OR status != 'retracted') AND "
                          '(a BETWEEN 3.332333 AND 3.334333 OR '
                          'a BETWEEN 0.999 AND 1.001) AND '
                          '(alpha BETWEEN 1.665666 AND 1.667666 OR '
                          'alpha BETWEEN -0.001 AND 0.001) AND '
                          "(chemname LIKE '%caffeine%' OR "
                          "chemname LIKE '%serotonine%') AND "
                          "(method IN ('single crystal') OR method IS NULL) AND "
                          "(formula REGEXP ' C[0-9 ]' AND "
                          "formula REGEXP ' H[0-9 ]' AND "
                          "formula REGEXP ' Cl[0-9 ]') AND "
                          "(formula IN ('- C6 H6 -')) AND "
                          '(file IN (1000000, 3000000)) AND '
                          '(cellpressure BETWEEN 999 AND 1001 OR '
                          'cellpressure BETWEEN 1000 AND 1002) AND '
                          '(celltemp BETWEEN -0.001 AND 0.001 OR '
                          'celltemp BETWEEN 10.499 AND 10.501) AND '
                          "(nel IN (5)) AND (sg IN ('P -1')) AND "
                          '(vol BETWEEN 99.999 AND 100.001 OR '
                          'vol BETWEEN 120.004 AND 120.006)')

    def test_datatype_checks(self):
        """Rather complicated, but wide-coverage test for data types, accepted
        and rejected by CodDbImporter._*_clause methods."""
        from aiida.tools.dbimporters.plugins.cod import CodDbImporter

        codi = CodDbImporter()
        messages = [
            '', "incorrect value for keyword 'test' only integers and strings are accepted",
            "incorrect value for keyword 'test' only strings are accepted",
            "incorrect value for keyword 'test' only integers and floats are accepted",
            "invalid literal for int() with base 10: 'text'"
        ]
        values = [10, 'text', 'text', '10', 1.0 / 3, [1, 2, 3]]
        methods = [
            # pylint: disable=protected-access
            codi._int_clause,
            codi._str_exact_clause,
            codi._formula_clause,
            codi._str_fuzzy_clause,
            codi._composition_clause,
            codi._volume_clause
        ]
        results = [[0, 4, 4, 0, 1, 1], [0, 0, 0, 0, 1, 1], [2, 0, 0, 0, 2, 2], [0, 0, 0, 0, 1, 1], [2, 0, 0, 0, 2, 2],
                   [0, 3, 3, 3, 0, 3]]
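        # results[i][j] is the index into `messages` of the error expected when
        # methods[i] is called with values[j]; 0 means the call should succeed.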

        for i, _ in enumerate(methods):
            for j, _ in enumerate(values):
                message = messages[0]
                try:
                    methods[i]('test', 'test', [values[j]])
                except ValueError as exc:
                    message = str(exc)
                self.assertEqual(message, messages[results[i][j]])

    def test_dbentry_creation(self):
        """Tests the creation of CodEntry from CodSearchResults."""
        from aiida.tools.dbimporters.plugins.cod import CodSearchResults

        results = CodSearchResults([{
            'id': '1000000',
            'svnrevision': None
        }, {
            'id': '1000001',
            'svnrevision': '1234'
        }, {
            'id': '2000000',
            'svnrevision': '1234'
        }])
        self.assertEqual(len(results), 3)
        self.assertEqual(
            results.at(1).source, {
                'db_name': 'Crystallography Open Database',
                'db_uri': 'http://www.crystallography.net/cod',
                'extras': {},
                'id': '1000001',
                'license': 'CC0',
                'source_md5': None,
                'uri': 'http://www.crystallography.net/cod/1000001.cif@1234',
                'version': '1234',
            }
        )
        self.assertEqual([x.source['uri'] for x in results], [
            'http://www.crystallography.net/cod/1000000.cif', 'http://www.crystallography.net/cod/1000001.cif@1234',
            'http://www.crystallography.net/cod/2000000.cif@1234'
        ])

    @unittest.skipIf(not has_pycifrw(), 'Unable to import PyCifRW')
    def test_dbentry_to_cif_node(self):
        """Tests the creation of CifData node from CodEntry."""
        from aiida.orm import CifData
        from aiida.tools.dbimporters.plugins.cod import CodEntry

        entry = CodEntry('http://www.crystallography.net/cod/1000000.cif')
        entry.cif = "data_test _publ_section_title 'Test structure'"

        cif = entry.get_cif_node()
        self.assertEqual(isinstance(cif, CifData), True)
        self.assertEqual(cif.get_attribute('md5'), '070711e8e99108aade31d20cd5c94c48')
        self.assertEqual(
            cif.source, {
                'db_name': 'Crystallography Open Database',
                'db_uri': 'http://www.crystallography.net/cod',
                'id': None,
                'version': None,
                'extras': {},
                'source_md5': '070711e8e99108aade31d20cd5c94c48',
                'uri': 'http://www.crystallography.net/cod/1000000.cif',
                'license': 'CC0',
            }
        )