def _clean(self, **kw):
    """Remove everything inside the CLDF directory, if it exists.

    The directory itself is kept; only its contents (files and
    subdirectories) are deleted.

    :param kw: ignored; accepted for call-signature compatibility.
    """
    # Pass the path as a lazy %-style argument so the logging framework
    # only formats the message when DEBUG is actually enabled
    # (the original eagerly built the string with `%`).
    self.log.debug('removing CLDF directory %s', self.cldf_dir)
    if self.cldf_dir.exists():
        for entry in self.cldf_dir.iterdir():
            # `remove` handles plain files, `rmtree` directories.
            if entry.is_file():
                remove(entry)
            else:
                rmtree(entry)
def test_rmtree(self):
    """rmtree raises OSError on missing paths unless errors are ignored."""
    from clldutils.path import rmtree

    missing = self.tmp_path('nonexistingpath')
    self.assertRaises(OSError, rmtree, missing)
    # With ignore_errors the same call must be a silent no-op.
    rmtree(self.tmp_path('nonexistingpath'), ignore_errors=True)

    created = self.tmp_path('test')
    created.mkdir()
    self.assertTrue(created.exists())
    rmtree(created)
    self.assertFalse(created.exists())
def recode(args):
    """Assign a new glottocode to an existing languoid.

    glottolog recode <code>
    """
    old_code = args.args[0]
    lang = args.repos.languoid(old_code)
    if not lang:
        raise ParserError('languoid not found')
    # Derive a fresh glottocode from the languoid's name and move the
    # languoid's directory to the new id.
    lang.id = Glottocode.from_name(lang.name)
    target = lang.dir.parent.joinpath(lang.id)
    copytree(lang.dir, target)
    lang.write_info(target)
    # Drop the stale ini named after the old code, then the old directory.
    remove(target.joinpath('%s.ini' % old_code))
    rmtree(lang.dir)
    print("%s -> %s" % (old_code, lang.id))
def recode(args):
    """Assign a new glottocode to an existing languoid.

    glottolog recode <code>
    """
    old_code = args.args[0]
    lang = find_languoid(glottocode=old_code)
    if not lang:
        raise ParserError('languoid not found')
    # Compute the replacement glottocode and relocate the languoid's
    # directory under the new id.
    lang.id = Glottocode.from_name(lang.name)
    target = lang.dir.parent.joinpath(lang.id)
    copytree(lang.dir, target)
    lang.write_info(target)
    # Remove the ini file still named after the old code, then the
    # now-obsolete source directory.
    remove(target.joinpath('%s.ini' % old_code))
    rmtree(lang.dir)
    print("%s -> %s" % (old_code, lang.id))
def test_Files(self):
    """A Sentence_files record creates a file on disk and is reachable
    via the owning object's ``files``/``audio`` accessors."""
    from clld.db.models.common import Sentence, Sentence_files

    if PY3:
        return  # pragma: no cover

    sentence = Sentence(id='abc', name='Name')
    attachment = Sentence_files(
        object=sentence, id='abstract', mime_type='audio/mpeg')
    created = attachment.create(
        Path(mkdtemp()).joinpath('clldtest').as_posix(), 'content')
    assert os.path.exists(created)
    rmtree(Path(created).parent.parent)

    sentence._files.append(attachment)
    DBSession.add(sentence)
    DBSession.flush()
    DBSession.refresh(sentence)
    assert sentence.files
    assert sentence.audio
def test_Files(self):
    """Creating a Sentence_files attachment writes a file to disk and
    exposes it through the sentence's file accessors."""
    from clld.db.models.common import Sentence, Sentence_files

    if PY3:
        return  # pragma: no cover

    obj = Sentence(id='abc', name='Name')
    file_rec = Sentence_files(object=obj, id='abstract', mime_type='audio/mpeg')
    path = file_rec.create(
        Path(mkdtemp()).joinpath('clldtest').as_posix(), 'content')
    assert os.path.exists(path)
    rmtree(Path(path).parent.parent)

    obj._files.append(file_rec)
    DBSession.add(obj)
    DBSession.flush()
    DBSession.refresh(obj)
    assert obj.files
    assert obj.audio
def test_freeze(self):
    """Round-trip: freeze the db to a zip, unfreeze into a fresh sqlite
    engine, and verify the data survived."""
    from clld.scripts.freeze import freeze_func, unfreeze_func

    tmp = Path(mkdtemp())
    tmp.joinpath('data').mkdir()
    tmp.joinpath('appname').mkdir()

    class Args(object):
        env = self.env
        module_dir = tmp.joinpath('appname').resolve()
        module = Mock(__name__='appname')

        def data_file(self, *comps):
            return tmp.resolve().joinpath('data', *comps)

    DBSession.flush()
    args = Args()
    freeze_func(args, dataset=Dataset.first(), with_history=False)
    self.assert_(tmp.joinpath('data.zip').exists())

    # Unfreeze into a brand-new, empty in-memory database.
    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    self.assertEqual(
        engine.execute('select count(*) from language').fetchone()[0], 0)
    unfreeze_func(args, engine=engine)

    src = DBSession
    dst = sessionmaker(bind=engine)()
    self.assertEqual(src.query(Language).count(), dst.query(Language).count())
    sample = src.query(Language).filter(Language.latitude != null()).first()
    restored = dst.query(Language).filter(Language.pk == sample.pk).first()
    self.assertEqual(sample.created, restored.created)
    self.assertEqual(sample.latitude, restored.latitude)
    self.assertEqual(sample.description, restored.description)
    contrib = dst.query(Contribution).filter(
        Contribution.id == 'contribution').one()
    self.assert_(contrib.primary_contributors)
    self.assert_(contrib.secondary_contributors)
    rmtree(tmp, ignore_errors=True)
def get_langs_index(api, recreate=False):
    """Open the whoosh languoid index, (re)building it when needed.

    :param api: repository API object providing ``build_path``.
    :param recreate: when True, an existing index is deleted first.
    :return: a whoosh index, freshly created or opened from disk.
    """
    index_dir = api.build_path('whoosh_langs')
    if index_dir.exists() and recreate:
        rmtree(index_dir)  # pragma: no cover
    if not index_dir.exists():
        index_dir.mkdir()
        # Schema for languoid search: stored fields come back in hits,
        # scorable keywords participate in ranking.
        schema = Schema(
            id=ID(stored=True),
            name=TEXT(stored=True),
            fname=ID(stored=True),
            iso=ID(stored=True),
            level=KEYWORD(scorable=True, stored=True),
            macroarea=KEYWORD(scorable=True),
            country=KEYWORD(scorable=True),
            latitude=NUMERIC(),
            longitude=NUMERIC(),
            ini=TEXT(analyzer=StemmingAnalyzer(), stored=True))
        return index.create_in(index_dir.as_posix(), schema)
    return index.open_dir(index_dir.as_posix())
def test_freeze(self):
    """Freeze the current db, restore it into an empty engine, and check
    that languages and contributors come back intact."""
    from clld.scripts.freeze import freeze_func, unfreeze_func

    workdir = Path(mkdtemp())
    workdir.joinpath('data').mkdir()
    workdir.joinpath('appname').mkdir()

    class Args(object):
        env = self.env
        module_dir = workdir.joinpath('appname').resolve()
        module = Mock(__name__='appname')

        def data_file(self, *comps):
            return workdir.resolve().joinpath('data', *comps)

    DBSession.flush()
    args = Args()
    freeze_func(args, dataset=Dataset.first(), with_history=False)
    self.assert_(workdir.joinpath('data.zip').exists())

    # Target database starts out completely empty.
    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    self.assertEqual(
        engine.execute('select count(*) from language').fetchone()[0], 0)
    unfreeze_func(args, engine=engine)

    original = DBSession
    thawed = sessionmaker(bind=engine)()
    self.assertEqual(
        original.query(Language).count(), thawed.query(Language).count())
    lang = original.query(Language).filter(Language.latitude != null()).first()
    copy = thawed.query(Language).filter(Language.pk == lang.pk).first()
    self.assertEqual(lang.created, copy.created)
    self.assertEqual(lang.latitude, copy.latitude)
    self.assertEqual(lang.description, copy.description)
    contrib = thawed.query(Contribution).filter(
        Contribution.id == 'contribution').one()
    self.assert_(contrib.primary_contributors)
    self.assert_(contrib.secondary_contributors)
    rmtree(workdir, ignore_errors=True)
def unfreeze_func(args, engine=None):
    """Load a frozen ``data.zip`` dump back into the database.

    :param args: CLI args object providing ``module`` and ``module_dir``.
    :param engine: optional SQLAlchemy engine; defaults to the session bind.
    """
    try:
        importlib.import_module(args.module.__name__)
    except ImportError:
        pass  # pragma: no cover
    engine = engine or DBSession.get_bind()

    # Extract the dump into a scratch directory.
    extract_dir = Path(mkdtemp())
    with ZipFile(as_posix(args.module_dir.joinpath('..', 'data.zip'))) as zipped:
        zipped.extractall(as_posix(extract_dir))

    # Load one csv per table, in dependency order.
    db_version = None
    for table in Base.metadata.sorted_tables:
        csv_path = extract_dir.joinpath('%s.csv' % table.name)
        if csv_path.exists():
            db_version = load(table, csv_path, engine)
    if db_version:
        set_alembic_version(engine, db_version)
    rmtree(extract_dir)
def unfreeze_func(args, engine=None):
    """Restore a frozen ``data.zip`` dump into the database.

    :param args: CLI args object providing ``module`` and ``module_dir``.
    :param engine: optional SQLAlchemy engine; defaults to the session bind.
    """
    try:
        importlib.import_module(args.module.__name__)
    except ImportError:
        pass  # pragma: no cover
    engine = engine or DBSession.get_bind()

    # Unpack the archive into a throw-away directory.
    scratch = Path(mkdtemp())
    with ZipFile(as_posix(args.module_dir.joinpath('..', 'data.zip'))) as archive:
        archive.extractall(as_posix(scratch))

    # Replay each table's csv in dependency order; `load` reports the
    # alembic version recorded in the dump.
    db_version = None
    for table in Base.metadata.sorted_tables:
        dump_file = scratch.joinpath('%s.csv' % table.name)
        if dump_file.exists():
            db_version = load(table, dump_file, engine)
    if db_version:
        set_alembic_version(engine, db_version)  # pragma: no cover
    rmtree(scratch)
def get_index(api, recreate=False, must_exist=False):
    """Open the whoosh full-text index, optionally (re)building it.

    :param api: repository API object providing ``ftsindex``.
    :param recreate: when True, an existing index is deleted and rebuilt.
    :param must_exist: when True, raise instead of creating a missing index.
    :raises ValueError: if ``must_exist`` is set and no index is present.
    :return: a whoosh index, freshly created or opened from disk.
    """
    index_dir = api.ftsindex
    if index_dir.exists():
        if recreate:
            rmtree(index_dir)  # pragma: no cover
    elif must_exist:
        raise ValueError('No whoosh index found at {0}.'.format(index_dir))
    if not index_dir.exists():
        index_dir.mkdir()
        # Stored fields are returned in search hits; ``body`` is only
        # searchable, not stored.
        schema = Schema(
            id=ID(stored=True),
            provider=KEYWORD(stored=True),
            authoryear=TEXT(stored=True),
            title=TEXT(analyzer=StemmingAnalyzer(), stored=True),
            author=TEXT(stored=True),
            year=TEXT(stored=True),
            doctype=TEXT(stored=True),
            lgcode=TEXT(stored=True),
            body=TEXT(),
            tags=KEYWORD)
        return index.create_in(index_dir.as_posix(), schema)
    return index.open_dir(index_dir.as_posix())
def test_rmtree(tmppath):
    """rmtree raises OSError for missing paths unless errors are ignored."""
    from clldutils.path import rmtree

    with pytest.raises(OSError):
        rmtree(tmppath / 'nonexistingpath')
    # ignore_errors turns the failing call into a no-op.
    rmtree(tmppath / 'nonexistingpath', ignore_errors=True)

    target = tmppath / 'test'
    target.mkdir()
    assert target.exists()
    rmtree(target)
    assert not target.exists()
def tearDown(self):
    """Remove the temp directory, then run the parent teardown."""
    # Best-effort cleanup: ignore_errors avoids masking the test result
    # with filesystem noise (e.g. the directory was already removed).
    rmtree(self.tmp, ignore_errors=True)
    super(WithTempDirMixin, self).tearDown()
def tmpd():
    """Yield a fresh temporary directory; remove it after the fixture ends."""
    tmp_dir = Path(mkdtemp())
    yield tmp_dir
    # Runs when the generator is resumed after the consumer is done.
    rmtree(tmp_dir)