def test_check(capsys, api_copy):
    """check flags unregistered glottocodes first, then duplicates after tree edits."""
    def run_and_collect_errors():
        # Invoke the check command with fresh args and return the logged error messages.
        cli_args = _args(api_copy)
        commands.check(cli_args)
        return [pos[0] for pos, _ in cli_args.log.error.call_args_list]

    commands.check(_args(api_copy, 'refs'))
    errors = run_and_collect_errors()
    assert 'family' in capsys.readouterr()[0]
    assert all('unregistered glottocode' in msg for msg in errors)
    assert len(errors) == 5

    # Copying a languoid directory to a second location creates a duplicate glottocode.
    copytree(api_copy.tree / 'abcd1234' / 'abcd1235', api_copy.tree / 'abcd1235')
    errors = run_and_collect_errors()
    assert any('duplicate glottocode' in msg for msg in errors)
    assert len(errors) == 7

    # Renaming the copy yields a distinct glottocode but a clashing hid.
    (api_copy.tree / 'abcd1235').rename(api_copy.tree / 'abcd1237')
    errors = run_and_collect_errors()
    assert any('duplicate hid' in msg for msg in errors)
    assert len(errors) == 9
def _do_test(config_files, inspector=None):
    """Build a BEAST XML from *config_files* and (except on Travis) run BEAST on it.

    Raises AssertionError when the BEAST subprocess exits non-zero; an optional
    *inspector* callable is given the temp dir to examine the run's output.
    """
    config = TEST_CASE.make_cfg([config_path(name).as_posix() for name in config_files])
    xml = beastling.beastxml.BeastXml(config)
    out_path = TEST_CASE.tmp_path('test').as_posix()
    xml.write_file(out_path)

    if os.environ.get('TRAVIS'):
        # On CI we only verify the generated XML is well-formed.
        et.parse(out_path)
        return

    if not TEST_CASE.tmp_path('tests').exists():
        copytree(tests_path(), TEST_CASE.tmp_path('tests'))
    try:
        if config_files in skip:
            raise SkipTest
        check_call(
            ['beast', '-java', '-overwrite', out_path],
            cwd=TEST_CASE.tmp.as_posix(),
            stdout=PIPE,
            stderr=PIPE)
    except CalledProcessError as e:
        raise AssertionError(
            "Beast run on {:} returned non-zero exit status {:d}".format(
                config_files, e.returncode))
    if inspector:
        inspector(TEST_CASE.tmp)
def test_Manifest(tmppath):
    """A faithful directory copy has the same manifest; adding content changes it."""
    source = Path(__file__).parent
    expected = {key: value for key, value in Manifest.from_dir(source).items()}
    copytree(source, tmppath / 'd')
    assert expected == Manifest.from_dir(tmppath / 'd')
    # Duplicating the tree inside the copy alters its manifest.
    copytree(source, tmppath / 'd' / 'd')
    assert expected != Manifest.from_dir(tmppath / 'd')
def test_copytree(self):
    """copytree creates the destination; copying onto an existing dir raises OSError."""
    from clldutils.path import copytree
    destination = self.tmp_path('a', 'b')
    copytree(self.tmp_path(), destination)
    self.assertTrue(destination.exists())
    # The destination already exists now, so a second copy must fail.
    self.assertRaises(OSError, copytree, destination, destination)
def test_check(capsys, _main, mocker, api_copy):
    """check --tree-only reports unregistered codes, then duplicates after tree edits."""
    def tree_check_errors():
        # Run the tree-only check with a fresh mock log and return its error messages.
        log = mocker.Mock()
        _main('check --tree-only', log=log)
        return [pos[0] for pos, _ in log.error.call_args_list]

    _main('check --bib-only')
    errors = tree_check_errors()
    assert 'family' in capsys.readouterr()[0]
    assert any('unregistered glottocode' in msg for msg in errors)
    assert any('missing reference' in msg for msg in errors)
    assert len(errors) == 27

    # Copying a languoid directory introduces a duplicate glottocode.
    copytree(api_copy.tree / 'abcd1234' / 'abcd1235', api_copy.tree / 'abcd1235')
    errors = tree_check_errors()
    assert any('duplicate glottocode' in msg for msg in errors)
    assert len(errors) == 29

    # Renaming the copy produces a distinct glottocode with a clashing hid.
    (api_copy.tree / 'abcd1235').rename(api_copy.tree / 'abcd1237')
    errors = tree_check_errors()
    assert any('duplicate hid' in msg for msg in errors)
    assert len(errors) >= 9
def repos(tmppath, git_repo_factory):
    """Fixture: a copy of the test repos with git repositories initialized."""
    target = tmppath / 'lexibank-data'
    copytree(Path(__file__).parent.joinpath('repos'), target)
    # Turn the top-level copy and both bundled datasets into git repositories.
    for repo_dir in (
            target,
            target / 'datasets' / 'test_dataset',
            target / 'datasets' / 'test_dataset_cldf'):
        git_repo_factory(repo_dir)
    copy(Path(pylexibank.__file__).parent.joinpath('cldf-metadata.json'), target)
    yield target
def test_copytree(tmppath):
    """copytree creates nested destinations; copying onto an existing dir raises."""
    from clldutils.path import copytree
    destination = tmppath / 'a' / 'b'
    copytree(tmppath, destination)
    assert destination.exists()
    # The destination exists now, so copying onto it must fail.
    with pytest.raises(OSError):
        copytree(destination, destination)
def abvd_dataset(repos, tmpdir, glottolog, concepticon):
    """Fixture: an ABVD-style dataset backed by a writable copy of the test data."""
    target = str(tmpdir.join('abvd'))
    copytree(repos / 'datasets' / 'abvd', target)

    class Dataset(abvd.BVD):
        id = 'x'
        SECTION = 'y'
        dir = Path(target)

    return Dataset(glottolog=glottolog, concepticon=concepticon)
def abvd_dataset(repos, tmp_path, glottolog, concepticon):
    """Fixture: an ABVD-style dataset backed by a writable copy of the test data."""
    target = tmp_path / 'abvd'
    copytree(repos / 'datasets' / 'abvd', target)

    class Dataset(abvd.BVD):
        id = 'x'
        SECTION = 'y'
        dir = target

    return Dataset(glottolog=glottolog, concepticon=concepticon)
def sndcmp2_dataset(repos, tmpdir, glottolog, concepticon):
    """Fixture: a SNDCMP Vanuatu dataset configured without cognate creation."""
    target = str(tmpdir.join('sndcmp'))
    copytree(repos / 'datasets' / 'sndcmp', target)

    class Dataset(SNDCMP):
        dir = target
        id = "sndcmpvanuatu"
        study_name = "Vanuatu"
        source_id_array = ["Shimelman2019"]
        create_cognates = False

    return Dataset()
def setUp(self):
    """Create a temp repos layout: languoids and references copies plus a build dir."""
    WithTempDir.setUp(self)
    self.repos = self.tmp_path()
    data = Path(__file__).parent.joinpath('data')
    self.languoids = self.tmp_path('languoids')
    copytree(data.joinpath('languoids'), self.languoids)
    self.tree = self.languoids.joinpath('tree')
    self.references = self.tmp_path('references')
    copytree(data.joinpath('references'), self.references)
    self.tmp_path('build').mkdir()
def sndcmp_dl_dataset(repos, tmpdir, glottolog, concepticon):
    """Fixture: a SNDCMP Brazil dataset without second gloss language or cognates."""
    target = str(tmpdir.join('sndcmp'))
    copytree(repos / 'datasets' / 'sndcmp', target)

    class Dataset(SNDCMP):
        dir = target
        id = "sndcmpbrazil"
        study_name = "Brazil"
        second_gloss_lang = None
        source_id_array = ["xy"]
        create_cognates = False

    return Dataset()
def recode(args):
    """Assign a new glottocode to an existing languoid.

    glottolog recode <code>
    """
    old_code = args.args[0]
    lang = find_languoid(glottocode=old_code)
    if not lang:
        raise ParserError('languoid not found')
    # Derive a fresh glottocode from the languoid's name and set up its new directory.
    lang.id = Glottocode.from_name(lang.name)
    new_dir = lang.dir.parent.joinpath(lang.id)
    copytree(lang.dir, new_dir)
    lang.write_info(new_dir)
    # Drop the stale info file carried over from the old directory, then the old dir itself.
    remove(new_dir.joinpath('%s.ini' % old_code))
    rmtree(lang.dir)
    print("%s -> %s" % (old_code, lang.id))
def recode(args):
    """Assign a new glottocode to an existing languoid.

    glottolog recode <code>
    """
    old_code = args.args[0]
    lang = args.repos.languoid(old_code)
    if not lang:
        raise ParserError('languoid not found')
    # Derive a fresh glottocode from the languoid's name and set up its new directory.
    lang.id = Glottocode.from_name(lang.name)
    new_dir = lang.dir.parent.joinpath(lang.id)
    copytree(lang.dir, new_dir)
    lang.write_info(new_dir)
    # Drop the stale info file carried over from the old directory, then the old dir itself.
    remove(new_dir.joinpath('%s.ini' % old_code))
    rmtree(lang.dir)
    print("%s -> %s" % (old_code, lang.id))
def setUp(self):
    """Prepare temp copies of the languoids and references test data and a build dir."""
    WithTempDir.setUp(self)
    self.repos = self.tmp_path()
    source = Path(__file__).parent
    self.languoids = self.tmp_path('languoids')
    copytree(source.joinpath('data', 'languoids'), self.languoids)
    self.tree = self.languoids.joinpath('tree')
    self.references = self.tmp_path('references')
    copytree(source.joinpath('data', 'references'), self.references)
    self.tmp_path('build').mkdir()
def test_check(self):
    """check reports unregistered glottocodes, then duplicates after copying a languoid."""
    from pyglottolog.commands import check
    # Checking the references alone should run through without errors.
    with capture(check, self._args('refs')):
        pass
    with capture(check, self._args()) as out:
        self.assertIn('family', out)
    for call in self.log.error.call_args_list:
        self.assertIn('unregistered glottocode', call[0][0])
    self.assertEqual(self.log.error.call_count, 4)
    # Duplicating a languoid directory introduces a duplicate glottocode.
    copytree(
        self.api.tree.joinpath('abcd1234', 'abcd1235'),
        self.api.tree.joinpath('abcd1235'))
    with capture(check, self._args()):
        self.assertIn(
            'duplicate glottocode',
            ''.join(call_args[0][0] for call_args in self.log.error.call_args_list))
    self.assertEqual(self.log.error.call_count, 6)
def sndcmp_dataset(repos, tmpdir, glottolog, concepticon):
    """Fixture: a SNDCMP Vanuatu dataset with a Bislama second gloss and cognates."""
    target = str(tmpdir.join('sndcmp'))
    copytree(repos / 'datasets' / 'sndcmp', target)

    class CustomConcept(SNDCMPConcept):
        # Extra column holding the Bislama gloss for each concept.
        Bislama_Gloss = attr.ib(default=None)

    class Dataset(SNDCMP):
        dir = target
        id = "sndcmpvanuatu"
        study_name = "Vanuatu"
        second_gloss_lang = "Bislama"
        source_id_array = ["Shimelman2019"]
        create_cognates = True
        concept_class = CustomConcept
        form_placeholder = 'P'
        only_proto_forms = True

        def get_source_id_array(self, lexeme):
            # Fixed source id used for every lexeme in this test setup.
            return ['ab']

    return Dataset()
def lff2tree(tree=TREE, outdir=None, test=False):
    """Rebuild the languoid directory tree from lff/dff files.

    - get mapping glottocode -> Languoid from old tree
    - assemble new directory tree
      - for each path component in lff/dff: create new dir and copy the info
        file from the old tree (possibly updating the name) or create one
      - for each language/dialect in lff/dff: create new dir and copy the info
        file from the old tree (possibly updating the name) or create one
    - rm old tree
    - copy new tree
    """
    out = Path(outdir or build_path('tree'))
    if not out.parent.exists():
        out.parent.mkdir()
    if out.exists():
        rmtree(out)
    out.mkdir()

    # Index the existing tree by glottocode so info files can be reused.
    old_tree = {lg.id: lg for lg in walk_tree(tree)} if tree else {}

    languages = {}
    for lang in read_lff('language'):
        languages[lang.id] = lang
        lang2tree(lang, lang.lineage, out, old_tree)

    for lang in read_lff('dialect'):
        if not lang.lineage or lang.lineage[0][1] not in languages:
            raise ValueError('unattached dialect')
        # Prepend the parent language's lineage so the dialect nests correctly.
        lang2tree(
            lang,
            languages[lang.lineage[0][1]].lineage + lang.lineage,
            out,
            old_tree)

    if not test:
        # Replace the live tree with the freshly assembled one.
        rmtree(TREE, ignore_errors=True)
        copytree(out, TREE)
def api_copy(tmpdir, repos_path):
    """Glottolog instance from isolated directory copy."""
    target = str(tmpdir / 'repos')
    path.copytree(str(repos_path), target)
    return pyglottolog.Glottolog(target)
def setUp(self):
    """Set up a Glottolog API instance over a temp copy of the test data."""
    WithTempDir.setUp(self)
    self.repos = self.tmp_path('repos')
    source = Path(__file__).parent.joinpath('data')
    copytree(source, self.repos)
    self.api = Glottolog(self.repos)
def sndcmp_dir(tmp_path, repos):
    """Fixture: path to a writable copy of the sndcmp test dataset."""
    target = tmp_path / 'sndcmp'
    copytree(repos / 'datasets' / 'sndcmp', target)
    return target
def repos(tmpd):
    """Fixture: a copy of the test repos with the pylexibank CLDF metadata added."""
    target = tmpd / 'lexibank-data'
    copytree(Path(__file__).parent.joinpath('repos'), target)
    copy(
        Path(pylexibank.__file__).parent.joinpath('cldf-metadata.json'),
        target)
    yield target
def bibfiles_copy(tmpdir, references_path):
    """Fixture: BibFiles over an isolated copy of the references directory."""
    target = tmpdir / 'references'
    path.copytree(str(references_path), str(target))
    return pyglottolog.references.BibFiles.from_path(str(target))
def test_dataset(tmppath):
    """Fixture: path to the td.py module inside a writable copy of the test dataset."""
    dataset_dir = tmppath / 'test_dataset'
    copytree(Path(__file__).parent / 'repo_data' / 'test_dataset', dataset_dir)
    return dataset_dir / 'td.py'