def test_loader_methods(self):
    """Exercise the PEP-302 finder/loader API of FSImportHook directly."""
    t = TempFS()
    self._init_modules(t)
    ih = FSImportHook(t)
    sys.meta_path.append(ih)
    try:
        # find_module returns the hook itself for names it can serve,
        # and None for unknown names.
        self.assertEquals(ih.find_module("fsih_hello"),ih)
        self.assertEquals(ih.find_module("fsih_helo"),None)
        self.assertEquals(ih.find_module("fsih_pkg"),ih)
        self.assertEquals(ih.find_module("fsih_pkg.sub1"),ih)
        self.assertEquals(ih.find_module("fsih_pkg.sub2"),ih)
        self.assertEquals(ih.find_module("fsih_pkg.sub3"),None)
        # load_module materializes the module with its expected attributes,
        # and raises ImportError for missing names.
        m = ih.load_module("fsih_hello")
        self.assertEquals(m.message,"hello world!")
        self.assertRaises(ImportError,ih.load_module,"fsih_helo")
        ih.load_module("fsih_pkg")
        m = ih.load_module("fsih_pkg.sub1")
        self.assertEquals(m.message,"hello world!")
        self.assertEquals(m.a,42)
        m = ih.load_module("fsih_pkg.sub2")
        self.assertEquals(m.message,"hello world!")
        self.assertEquals(m.a,42 * 2)
        self.assertRaises(ImportError,ih.load_module,"fsih_pkg.sub3")
    finally:
        # Always uninstall the hook and dispose of the temp filesystem.
        sys.meta_path.remove(ih)
        t.close()
class TestDokan(unittest.TestCase,DokanTestCases,ThreadingTestCases):
    """Run the shared Dokan test suites against a TempFS mounted as a Windows drive."""

    def setUp(self):
        # Pick the first free drive letter from K upward.
        self.temp_fs = TempFS()
        self.drive = "K"
        while os.path.exists(self.drive+":\\") and self.drive <= "Z":
            self.drive = chr(ord(self.drive) + 1)
        if self.drive > "Z":
            raise RuntimeError("no free drive letters")
        fs_to_mount = OSFS(self.temp_fs.getsyspath("/"))
        self.mount_proc = dokan.mount(fs_to_mount,self.drive)#,flags=dokan.DOKAN_OPTION_DEBUG|dokan.DOKAN_OPTION_STDERR,numthreads=1)
        self.fs = OSFS(self.mount_proc.path)

    def tearDown(self):
        self.mount_proc.unmount()
        # Unmounting is asynchronous: poll/terminate for up to ~1 second,
        # then force-terminate if the mount process is still alive.
        for _ in xrange(10):
            try:
                if self.mount_proc.poll() is None:
                    self.mount_proc.terminate()
            except EnvironmentError:
                time.sleep(0.1)
            else:
                break
        else:
            if self.mount_proc.poll() is None:
                self.mount_proc.terminate()
        self.temp_fs.close()
def test_importer_on_meta_path(self):
    """Installing the hook on sys.meta_path makes the FS modules importable."""
    temp_fs = TempFS()
    self._init_modules(temp_fs)
    hook = FSImportHook(temp_fs)
    sys.meta_path.append(hook)
    try:
        self._check_imports_are_working()
    finally:
        # Uninstall the hook and clean up even if the check fails.
        sys.meta_path.remove(hook)
        temp_fs.close()
class TestImporter (TestCase):
    """Tests for the generic Importer helper."""

    def setUp(self):
        TestCase.setUp(self)
        # Fresh temp filesystem and importer per test.
        self.fs = TempFS()
        self.importer = Importer()

    def test_finds_index_file_in_a_subdirectory(self):
        # _find_index_file should match wildcard patterns recursively.
        self.fs.makedir("directory")
        self.fs.setcontents("directory/file.txt", "test")
        index_file = self.importer._find_index_file(self.fs, ["*.txt"])
        assert_equals("directory/file.txt", index_file)
class TestCSVExport (TestCase):
    """Tests for CSVExporter: cards are written as quoted CSV rows to index.csv."""

    def setUp(self):
        TestCase.setUp(self)
        self.fs = TempFS()
        self.exporter = CSVExporter(self.fs, m.HTMLMarkupExporter(self))

    def test_single_card(self):
        card = m.ContentObject()
        card['question'] = u'Question'
        card['answer'] = u'Answer'
        self.exporter([card])
        expected = '"Question","Answer"\r\n'
        assert_equals(expected, self.fs.getcontents('index.csv'))

    def test_multiple_cards(self):
        card = m.ContentObject()
        card['question'] = u'Question'
        card['answer'] = u'Answer'
        card2 = m.ContentObject()
        card2['question'] = u'Question 2'
        card2['answer'] = u'Answer 2'
        self.exporter([card, card2])
        expected = '"Question","Answer"\r\n"Question 2","Answer 2"\r\n'
        assert_equals(expected, self.fs.getcontents('index.csv'))

    def test_multiline_question_and_answer(self):
        # Embedded newlines are normalized to CRLF inside quoted fields.
        card = m.ContentObject()
        card['question'] = u"Question\nend of question"
        card['answer'] = u"Answer\nend of answer"
        self.exporter([card])
        expected = '"Question\r\nend of question","Answer\r\nend of answer"\r\n'
        assert_equals(expected, self.fs.getcontents('index.csv'))

    def test_custom_encoding(self):
        # Output honours the exporter's configured encoding.
        card = m.ContentObject()
        card['question'] = u"chrząszcz brzmi w trzcinie"
        card['answer'] = u"zażółć gęślą jaźń"
        self.exporter.encoding = 'cp1250'
        self.exporter([card])
        expected = u'"chrząszcz brzmi w trzcinie","zażółć gęślą jaźń"\r\n'.encode('cp1250')
        assert_equals(expected, self.fs.getcontents('index.csv'))

    def test_card_with_image(self):
        # Image references are rewritten to the exported images/ location;
        # inner double quotes are CSV-escaped by doubling.
        self.images = [m.Image(filename='img2'), m.Image(filename='img1')]
        card = m.ContentObject()
        card['question'] = u'Question <img src="img2" />'
        card['answer'] = u'Answer <img src="img1" />'
        self.exporter([card])
        expected = '"Question <img src=""images/img2.jpg""/>","Answer <img src=""images/img1.jpg""/>"\r\n'
        assert_equals(expected, self.fs.getcontents('index.csv'))
def setUp(self):
    """Mount a subdirectory of a TempFS over FUSE and expose it via OSFS."""
    self.temp_fs = TempFS()
    # "root" is the backing store; "mount" is the FUSE mountpoint.
    self.temp_fs.makedir("root")
    self.temp_fs.makedir("mount")
    self.mounted_fs = self.temp_fs.opendir("root")
    self.mount_point = self.temp_fs.getsyspath("mount")
    self.fs = OSFS(self.temp_fs.getsyspath("mount"))
    self.mount_proc = fuse.mount(self.mounted_fs,self.mount_point)
def test_url_on_sys_path(self):
    """An fs URL appended to sys.path is served through FSImportHook."""
    t = TempFS()
    zpath = t.getsyspath("modules.zip")
    # Build a zip of the test modules, then re-open read-only to sanity check.
    z = ZipFS(zpath,"w")
    self._init_modules(z)
    z.close()
    z = ZipFS(zpath,"r")
    assert z.isfile("fsih_hello.py")
    z.close()
    sys.path.append("zip://" + zpath)
    FSImportHook.install()
    try:
        self._check_imports_are_working()
    finally:
        # Undo both the path-hook installation and the sys.path entry.
        sys.path_hooks.remove(FSImportHook)
        sys.path.pop()
        t.close()
def snapshot(self, path):
    """Takes a snapshot of an individual file.

    Copies the file at *path* into a private TempFS so rdiff-backup sees a
    one-file source directory, then runs rdiff-backup into the per-file
    snapshot destination.

    :param path: path of the file (within ``self.fs``) to snapshot.
    :raises SnapshotError: if rdiff-backup emits stderr output that is not
        covered by the ignore rules.
    """
    # Keep the staging TempFS on the same filesystem as self.tmp.
    temp_dir = self.tmp.getsyspath('/')
    temp_snapshot_fs = TempFS(temp_dir=temp_dir)
    src_path = temp_snapshot_fs.getsyspath('/')
    with self.fs.open(path, 'rb') as source_file:
        with temp_snapshot_fs.open('datafile', 'wb') as temp_file:
            shutil.copyfileobj(source_file, temp_file)
    # snapshot destination directory
    dest_dir = self.snapshot_snap_path(path)
    command = ['rdiff-backup', '--parsable-output', '--no-eas',
               '--no-file-statistics', '--no-acls', '--tempdir',
               self.tmp.getsyspath('/'), src_path, dest_dir]
    # Under test, pin monotonically increasing timestamps to speed things up.
    if self.__testing:
        command.insert(5, '--current-time')
        command.insert(6, str(self.__testing['time']))
        self.__testing['time'] += 1
    process = Popen(command, stdout=PIPE, stderr=PIPE)
    stderr = process.communicate()[1]
    ignore = [lambda x: x.startswith("Warning: could not determine case")]
    # Fix: the original used `len(stderr) is not 0`, an identity comparison
    # on an int (works only via CPython small-int caching; SyntaxWarning on
    # Python 3.8+). Truthiness is the correct check for "any stderr output".
    if stderr:
        for rule in ignore:
            if not rule(stderr):
                raise SnapshotError(stderr)
    # close the temp snapshot filesystem
    temp_snapshot_fs.close()
def setUp(self):
    """Mount a TempFS as a Windows drive via Dokan, using a free drive letter."""
    self.temp_fs = TempFS()
    # Scan for the first unused drive letter from K upward.
    self.drive = "K"
    while os.path.exists(self.drive+":\\") and self.drive <= "Z":
        self.drive = chr(ord(self.drive) + 1)
    if self.drive > "Z":
        raise RuntimeError("no free drive letters")
    fs_to_mount = OSFS(self.temp_fs.getsyspath("/"))
    self.mount_proc = dokan.mount(fs_to_mount,self.drive)#,flags=dokan.DOKAN_OPTION_DEBUG|dokan.DOKAN_OPTION_STDERR,numthreads=1)
    self.fs = OSFS(self.mount_proc.path)
class TestSuperMemoQAExport (TestCase):
    """Tests for SuperMemoQAExporter: cards become Q:/A: line pairs in out.txt."""

    def setUp(self):
        TestCase.setUp(self)
        self.fs = TempFS()
        self.exporter = SuperMemoQAExporter(self.fs)
        self.exporter.index_file = 'out.txt'

    def test_single_card(self):
        card = m.ContentObject()
        card['question'] = u'Question'
        card['answer'] = u'Answer'
        self.exporter([card])
        expected = "Q: Question\r\nA: Answer\r\n"
        assert_equals(expected, self.fs.getcontents('out.txt'))

    def test_multiple_cards(self):
        # Cards are separated by a blank CRLF line.
        card = m.ContentObject()
        card['question'] = u'Question'
        card['answer'] = u'Answer'
        card2 = m.ContentObject()
        card2['question'] = u'Question 2'
        card2['answer'] = u'Answer 2'
        self.exporter([card, card2])
        expected = "Q: Question\r\nA: Answer\r\n\r\nQ: Question 2\r\nA: Answer 2\r\n"
        assert_equals(expected, self.fs.getcontents('out.txt'))

    def test_multiline_question_and_answer(self):
        # Each embedded newline becomes an additional Q:/A: continuation line.
        card = m.ContentObject()
        card['question'] = u"Question\nend of question"
        card['answer'] = u"Answer\nend of answer"
        self.exporter([card])
        expected = "Q: Question\r\nQ: end of question\r\nA: Answer\r\nA: end of answer\r\n"
        assert_equals(expected, self.fs.getcontents('out.txt'))

    def test_custom_encoding(self):
        # Output honours the exporter's configured encoding.
        card = m.ContentObject()
        card['question'] = u"chrząszcz brzmi w trzcinie"
        card['answer'] = u"zażółć gęślą jaźń"
        self.exporter.encoding = 'cp1250'
        self.exporter([card])
        expected = u'Q: chrząszcz brzmi w trzcinie\r\nA: zażółć gęślą jaźń\r\n'.encode('cp1250')
        assert_equals(expected, self.fs.getcontents('out.txt'))
class TestCacheFS(unittest.TestCase,FSTestCases,ThreadingTestCases):
    """Test simple operation of CacheFS"""

    def setUp(self):
        # Lower the interpreter check interval to shake out threading races.
        self._check_interval = sys.getcheckinterval()
        sys.setcheckinterval(10)
        self.wrapped_fs = TempFS()
        self.fs = CacheFS(self.wrapped_fs,cache_timeout=0.01)

    def tearDown(self):
        self.fs.close()
        sys.setcheckinterval(self._check_interval)

    def test_values_are_used_from_cache(self):
        """With timeout disabled, stale cache entries mask backing-fs changes."""
        old_timeout = self.fs.cache_timeout
        self.fs.cache_timeout = None
        try:
            self.assertFalse(self.fs.isfile("hello"))
            self.wrapped_fs.setcontents("hello","world")
            self.assertTrue(self.fs.isfile("hello"))
            # File removed underneath us, but the cache still says it exists...
            self.wrapped_fs.remove("hello")
            self.assertTrue(self.fs.isfile("hello"))
            # ...until the cache is explicitly cleared.
            self.fs.clear_cache()
            self.assertFalse(self.fs.isfile("hello"))
        finally:
            self.fs.cache_timeout = old_timeout

    def test_values_are_updated_in_cache(self):
        """Operations performed through CacheFS itself update the cache."""
        old_timeout = self.fs.cache_timeout
        self.fs.cache_timeout = None
        try:
            self.assertFalse(self.fs.isfile("hello"))
            self.wrapped_fs.setcontents("hello","world")
            self.assertTrue(self.fs.isfile("hello"))
            self.wrapped_fs.remove("hello")
            self.assertTrue(self.fs.isfile("hello"))
            self.wrapped_fs.setcontents("hello","world")
            self.assertTrue(self.fs.isfile("hello"))
            # Removing via the CacheFS wrapper invalidates the cached entry.
            self.fs.remove("hello")
            self.assertFalse(self.fs.isfile("hello"))
        finally:
            self.fs.cache_timeout = old_timeout
def setUp(self):
    """Prepare a SuperMemoQAImporter over a fresh TempFS with HTML fields."""
    TestCase.setUp(self)
    self.fs = TempFS()
    factory = m.ImportedInstanceFactory(self, field_types={
        'question': 'html',
        'answer': 'html'
    })
    self.importer = SuperMemoQAImporter(self.fs, factory, m.HTMLMarkupImporter(self))
    # Collectors populated by the importer callbacks during tests.
    self.cos = []
    self.images = []
    self.sounds = []
class TestFUSE(unittest.TestCase,FSTestCases):
    """Run the shared FS test cases against a FUSE-mounted TempFS subdirectory."""

    def setUp(self):
        self.temp_fs = TempFS()
        # "root" holds the real data; "mount" is where FUSE exposes it.
        self.temp_fs.makedir("root")
        self.temp_fs.makedir("mount")
        self.mounted_fs = self.temp_fs.opendir("root")
        self.mount_point = self.temp_fs.getsyspath("mount")
        self.fs = OSFS(self.temp_fs.getsyspath("mount"))
        self.mount_proc = fuse.mount(self.mounted_fs,self.mount_point)

    def tearDown(self):
        self.mount_proc.unmount()
        self.temp_fs.close()

    def check(self,p):
        # Assertions inspect the backing store, not the mountpoint.
        return self.mounted_fs.exists(p)
def save(self, data, file_format, compress_level=0):
    """Method for saving a feature.

    For local filesystems the data is first written into a sibling TempFS
    and then moved into place; other filesystems are written directly.
    """
    suffix = ""
    if compress_level:
        suffix = "." + MimeType.GZIP.extension
    path = f"{self.path}.{file_format.extension}{suffix}"
    if not isinstance(self.filesystem, (fs.osfs.OSFS, TempFS)):
        # Remote/other filesystem: save straight to the target path.
        return self._save(self.filesystem, data, path, file_format, compress_level)
    # Local filesystem: stage in a TempFS under the same root, then move.
    with TempFS(temp_dir=self.filesystem.root_path) as staging_fs:
        self._save(staging_fs, data, "tmp_feature", file_format, compress_level)
        fs.move.move_file(staging_fs, "tmp_feature", self.filesystem, path)
def test_make_version_view(self) -> None:
    """Writing an empty VersionContents creates the -mv layout and a version view."""
    with make_mv_osfs(join(self.temp_dir, "foo")) as base_fs:
        mv = Multiversioned(base_fs)
        lidvid = LIDVID("urn:nasa:pds:b::1.0")
        no_lidvids: Set[LIDVID] = set()
        # Store empty contents for bundle "b" version 1.0.
        mv[lidvid] = VersionContents.create_from_lidvids(
            no_lidvids, TempFS(), set())
        names = OSFS(self.temp_dir).walk.dirs()
        self.assertEqual({"/foo-mv", "/foo-mv/b", "/foo-mv/b/v$1.0"},
                         set(names))
        # The version view exposes the bundle as a single "$"-suffixed dir.
        with make_version_view(base_fs, "b") as vv:
            self.assertEqual(["/b$"], list(vv.walk.dirs()))
def startServer(self):
    """Start a test server on 127.0.0.1, probing ports upward from 3000.

    Python 2 code (`except socket.error, e` syntax).
    """
    port = 3000
    self.temp_fs = TempFS()
    self.server = None
    while not self.server:
        try:
            self.server = self.makeServer(self.temp_fs, ("127.0.0.1", port))
        except socket.error, e:
            # Port taken: try the next one; re-raise anything else.
            if e.args[1] == "Address already in use":
                port += 1
            else:
                raise
def save(self, data, file_format, compress_level=0):
    """ Method for saving a feature

    Local (OSFS/TempFS) targets are staged in a TempFS and moved into place;
    other filesystems are written directly and the _save result returned.
    """
    gz_extension = FileFormat.GZIP.extension() if compress_level else ''
    path = self.path + file_format.extension() + gz_extension
    if isinstance(self.filesystem, (fs.osfs.OSFS, TempFS)):
        # Stage locally, then move the finished file to its final path.
        with TempFS(temp_dir=self.filesystem.root_path) as tempfs:
            self._save(tempfs, data, 'tmp_feature', file_format, compress_level)
            fs.move.move_file(tempfs, 'tmp_feature', self.filesystem, path)
            return
    return self._save(self.filesystem, data, path, file_format, compress_level)
def test_get_profile_version_electrification():
    """Versions are discovered from CSV filenames; unknown tech yields []."""
    with TempFS() as tmp_fs:
        grid_model = "usa_tamu"
        kind = "building"
        end_use = "res_cooking"
        tech = "standard_heat_pump"
        # Lay out raw/<grid_model>/<kind>/<end_use>_<tech>_v1.csv
        profile_dir = tmp_fs.makedirs(f"raw/{grid_model}/{kind}", recreate=True)
        profile_dir.touch(f"{end_use}_{tech}_v1.csv")
        version = get_profile_version_elec(tmp_fs, grid_model, kind, end_use, tech)
        v_missing = get_profile_version_elec(
            tmp_fs, grid_model, kind, end_use, "fake_tech"
        )
        assert version[0] == "v1"
        assert v_missing == []
def test_validate_tmpfs_template_wrong_name():
    """A template whose extension is not .j2 is reported as an invalid name."""
    tmp_fs = TempFS()
    tmp_fs.makedirs('package/report')
    # Deliberately wrong suffix: .j3 instead of .j2.
    tmp_fs.create('package/report/template.html.j3')
    tmp_fs.create('css_file.css')
    definition = RendererDefinition(
        root_path=tmp_fs.root_path,
        id='renderer_id',
        type='pdf',
        description='description',
        template='package/report/template.html.j3',
        args={'css_file': 'css_file.css'},
    )
    errors = PDFRenderer.validate(definition)
    assert f"invalid template name: `{definition.template}`" in errors[0]
def make_package(package_fs, output_fs, output_path, exclude_wildcards, auth_token):
    """Make a Moya package.

    Builds the package tree in a TempFS, then zips it to *output_path* on
    *output_fs* together with a manifest.csv.
    """
    manifest_filename = "manifest.csv"
    with TempFS() as temp_fs:
        manifest = _make_package_fs(package_fs, temp_fs, exclude_wildcards, auth_token=auth_token)
        with output_fs.open(output_path, 'wb') as dest_file:
            with ZipFS(dest_file, 'w') as zip_fs:
                fs.copy.copy_dir(temp_fs, '/', zip_fs, '/')
                # Record the manifest inside the archive itself.
                export_manifest(manifest, zip_fs, filename=manifest_filename)
def test_validator_entrypoint_bad_format(report_v2_json):
    """An entrypoint not following package structure is rejected by validation."""
    tmp_filesystem = TempFS()
    tmp_filesystem.create('readme.md')
    renderer_json_dict = {
        'root_path': tmp_filesystem.root_path,
        'id': '321',
        'type': 'json',
        'description': 'JSON Renderer',
    }
    renderer = RendererDefinition(**renderer_json_dict)
    # 'mypackage' is a bare name, not a dotted package path.
    report_dict = report_v2_json(
        readme_file='readme.md',
        entrypoint='mypackage',
        renderers=[renderer],
    )
    report = ReportDefinition(
        root_path=tmp_filesystem.root_path,
        **report_dict,
    )
    errors = _validate_report(report)
    assert len(errors) != 0
    assert 'does not follow the package structure' in errors[0]
def test_listdir(self):
    """Mounted filesystems appear as top-level entries; nested mounts fail."""
    mount_fs = MountFS()
    self.assertEqual(mount_fs.listdir("/"), [])
    m1 = MemoryFS()
    m3 = MemoryFS()
    m4 = TempFS()
    mount_fs.mount("/m1", m1)
    # Mounting by URL is also supported.
    mount_fs.mount("/m2", "temp://")
    mount_fs.mount("/m3", m3)
    # Cannot mount inside an existing mount.
    with self.assertRaises(MountError):
        mount_fs.mount("/m3/foo", m4)
    self.assertEqual(sorted(mount_fs.listdir("/")), ["m1", "m2", "m3"])
    # Listing descends into the mounted filesystem.
    m3.makedir("foo")
    self.assertEqual(sorted(mount_fs.listdir("/m3")), ["foo"])
def extract(self, archive, lib_name):
    """Extract documentation for *lib_name* from *archive*.

    Returns the filesystem the docs were extracted into: a TempFS when no
    --extract destination was given, otherwise the requested directory.
    """
    args = self.args
    # Fix: the original computed a `namespaces` local (falling back to
    # archive.known_namespaces) but never used it; the dead code is removed.
    from ...docgen.extracter import Extracter
    if args.extract is None:
        extract_fs = TempFS('moyadoc-{}'.format(lib_name))
    else:
        extract_fs = self.get_fs(join(args.extract, lib_name))
    extracter = Extracter(archive, extract_fs)
    extracter.extract_lib(lib_name)
    return extract_fs
def startServer(self):
    """Start the test server on a daemon thread and wait until it is ready."""
    port = 3000
    self.temp_fs = TempFS()
    self.server = None
    self.serve_more_requests = True
    self.server_thread = threading.Thread(target=self.runServer)
    # Daemonize so a hung server cannot block interpreter exit.
    self.server_thread.setDaemon(True)
    self.start_event = threading.Event()
    self.end_event = threading.Event()
    self.server_thread.start()
    # runServer signals start_event once it is accepting requests.
    self.start_event.wait()
def copy_from(self, file_name, from_dir=None):
    """Copy a file from data store to userspace.

    :param str file_name: file name to copy.
    :param str from_dir: data store directory to copy file from.
    """
    if from_dir is None:
        from_dir = ""
    from_path = self.join(from_dir, file_name)
    self._check_file_exists(from_path, should_exist=True)
    print(f"Transferring {file_name} from server")
    # Download into a staging TempFS first, then move into the local fs.
    with TempFS() as staging_fs:
        self.local_fs.makedirs(from_dir, recreate=True)
        staging_fs.makedirs(from_dir, recreate=True)
        fs2.copy.copy_file(self.fs, from_path, staging_fs, from_path)
        fs2.move.move_file(staging_fs, from_path, self.local_fs, from_path)
def test_render(account_factory, report_factory, report_data):
    """JSONRenderer.render produces a zip with report.json + summary.json."""
    tmp_fs = TempFS()
    data = report_data(2, 2)
    renderer = JSONRenderer(
        'runtime',
        tmp_fs.root_path,
        account_factory(),
        report_factory(),
    )
    output_file = renderer.render(data, f'{tmp_fs.root_path}/report')
    assert output_file == f'{tmp_fs.root_path}/report.zip'
    with ZipFile(output_file) as repzip:
        assert sorted(repzip.namelist()) == ['report.json', 'summary.json']
        # report.json holds the data exactly as json.dumps would serialize it.
        with repzip.open('report.json') as repfile:
            assert repfile.read().decode('utf-8') == json.dumps(data)
def test_generate_summary(account_factory, report_factory):
    """generate_summary writes <path>.json and returns that filename."""
    tmp_fs = TempFS()
    renderer = JSONRenderer(
        'runtime',
        tmp_fs.root_path,
        account_factory(),
        report_factory(),
    )
    summary_path = f'{tmp_fs.root_path}/summary'
    output_file = renderer.generate_summary(
        summary_path,
        start_time=datetime.now(),
    )
    assert output_file == f'{tmp_fs.root_path}/summary.json'
def test_generate_report_generator(account_factory, report_factory):
    """Renderer accepts a generator of rows and serializes it as a JSON list."""
    tmp_fs = TempFS()
    data = ({'key': 'value'} for _ in range(10))
    renderer = JSONRenderer(
        'runtime',
        tmp_fs.root_path,
        account_factory(),
        report_factory(),
    )
    output_file = renderer.render(data, f'{tmp_fs.root_path}/report')
    assert output_file == f'{tmp_fs.root_path}/report.zip'
    # The first generator was consumed by render; rebuild an identical one
    # to compute the expected serialization.
    data = ({'key': 'value'} for _ in range(10))
    with ZipFile(output_file) as repzip:
        assert sorted(repzip.namelist()) == ['report.json', 'summary.json']
        with repzip.open('report.json') as repfile:
            assert repfile.read().decode('utf-8') == orjson.dumps(
                list(data)).decode('utf-8')
def get_fs(cls, registry, fs_name, fs_name_params, fs_path, writeable, create_dir):
    """Open a DebugFS-wrapped filesystem.

    With an explicit *fs_path* the registry resolves the target; otherwise a
    MemoryFS ('ram') or TempFS backing store is created and wrapped.
    Returns a ``(filesystem, path)`` tuple whose path component is None.
    """
    from fs.wrapfs.debugfs import DebugFS
    if fs_path:
        wrapped, _path = registry.parse(fs_path, writeable=writeable, create_dir=create_dir)
        return DebugFS(wrapped, verbose=False), None
    # No path given: choose the backing store from the fs parameters.
    if fs_name_params == 'ram':
        from fs.memoryfs import MemoryFS
        backing = MemoryFS()
    else:
        from fs.tempfs import TempFS
        backing = TempFS()
    return DebugFS(backing, identifier=fs_name_params, verbose=False), None
def test_d_and_c(self):
    """Every source in sources.csv downloads and caches to a real file."""
    from csv import DictReader
    from old.fetch import download_and_cache
    from os.path import isfile
    cache = TempFS()
    with open(data_path('sources.csv')) as f:
        for e in DictReader(f):
            try:
                d = download_and_cache(SourceSpec(**e), cache)
            except ModuleNotFoundError:
                # For when metatab isn't installed.
                continue
            self.assertTrue(isfile(d['sys_path']))
def test_render(account_factory, report_factory):
    """CSVRenderer.render zips a quoted CSV report plus a summary file."""
    with TempFS() as tmp_fs:
        data = [['line1'], ['line2']]
        renderer = CSVRenderer(
            'runtime',
            tmp_fs.root_path,
            account_factory(),
            report_factory(),
        )
        output_file = renderer.render(data, f'{tmp_fs.root_path}/report')
        assert output_file == f'{tmp_fs.root_path}/report.zip'
        with ZipFile(output_file) as repzip:
            assert sorted(repzip.namelist()) == ['report.csv', 'summary.json']
            with repzip.open('report.csv') as repfile:
                # Each input row becomes one quoted CSV line.
                content = repfile.read().decode('utf-8').split()
                assert content[0] == f'"{data[0][0]}"'
                assert content[1] == f'"{data[1][0]}"'
def cli_validator_dual_auth(cli_setup_dual_auth, validator):
    """Fixture: register the datafs CLI validator with a TempFS authority.

    Yields the validator's teststring; tears down the authority and the
    registered call engine afterwards.
    """
    _, api, _, prefix = cli_setup_dual_auth
    # Start from a clean slate; the archive may not exist yet.
    try:
        api.delete_archive('my_archive')
    except KeyError:
        pass
    validator.call_engines['datafs'] = ClickValidator(app=cli, prefix=prefix)
    api.attach_authority('my_authority', TempFS())
    try:
        yield validator.teststring
    finally:
        # Close the TempFS-backed authority and unregister the engine.
        api._authorities['my_authority'].fs.close()
        del validator.call_engines['datafs']
def test_listdir(self):
    """Mounted filesystems appear as top-level entries; nested mounts fail."""
    mount_fs = MountFS()
    self.assertEqual(mount_fs.listdir('/'), [])
    m1 = MemoryFS()
    m3 = MemoryFS()
    m4 = TempFS()
    mount_fs.mount('/m1', m1)
    # Mounting by URL is also supported.
    mount_fs.mount('/m2', 'temp://')
    mount_fs.mount('/m3', m3)
    # Cannot mount inside an existing mount.
    with self.assertRaises(MountError):
        mount_fs.mount('/m3/foo', m4)
    self.assertEqual(
        sorted(mount_fs.listdir('/')),
        ['m1', 'm2', 'm3']
    )
    # Listing descends into the mounted filesystem.
    m3.makedir('foo')
    self.assertEqual(
        sorted(mount_fs.listdir('/m3')),
        ['foo']
    )
def test_validate_tmpfs_css_missing():
    """A css_file argument pointing at a missing file is reported."""
    tmp_fs = TempFS()
    tmp_fs.makedirs('package/report')
    tmp_fs.create('package/report/template.html.j2')
    # Note: the referenced css_file is deliberately never created.
    definition = RendererDefinition(
        root_path=tmp_fs.root_path,
        id='renderer_id',
        type='pdf',
        description='description',
        template='package/report/template.html.j2',
        args={'css_file': 'package/report/css_file.css'},
    )
    errors = PDFRenderer.validate(definition)
    assert f"css_file `{definition.args['css_file']}` not found." == errors[0]
def test_delayed_flo(self):
    """Smoke-test get_generator on the 'zip_no_xls' source from sources.csv."""
    from csv import DictReader
    cache = TempFS()
    # Fix: the original allocated `success = []` and `errors = []` locals
    # that were never read; the dead code is removed.
    with open(data_path('sources.csv')) as f:
        for e in DictReader(f):
            # Skip everything except the single source under test.
            if e['name'] in ('simple_fixed', ):
                continue
            if e['name'] not in ('zip_no_xls', ):
                continue
            s = SourceSpec(**e)
            print(s.dict)
            d = get_generator(s, cache)
            # Materialize the generator to prove it yields without error.
            print(s._url, len(list(d)))
def test_generate_report_dict(account_factory, report_factory):
    """A dict payload with rich value types serializes via orjson into the zip."""
    tmp_fs = TempFS()
    data = {
        'key': 'value',
        'int': 3,
        'float': 3.4,
        'datetime': datetime.now(),
        'date': date.today(),
        'time': time(12, 11, 10),
    }
    renderer = JSONRenderer(
        'runtime',
        tmp_fs.root_path,
        account_factory(),
        report_factory(),
    )
    output_file = renderer.render(data, f'{tmp_fs.root_path}/report')
    assert output_file == f'{tmp_fs.root_path}/report.zip'
    with ZipFile(output_file) as repzip:
        assert sorted(repzip.namelist()) == ['report.json', 'summary.json']
        # Content must match orjson's own serialization of the same dict.
        with repzip.open('report.json') as repfile:
            assert repfile.read().decode('utf-8') == orjson.dumps(data).decode(
                'utf-8')
def snapshot(self, path):
    """Takes a snapshot of an individual file.

    Copies the file at *path* into a private TempFS so rdiff-backup sees a
    one-file source directory, then runs rdiff-backup into the per-file
    snapshot destination.

    :param path: path of the file (within ``self.fs``) to snapshot.
    :raises SnapshotError: if rdiff-backup emits stderr output that is not
        covered by the ignore rules.
    """
    # Keep the staging TempFS on the same filesystem as self.tmp.
    temp_dir = self.tmp.getsyspath('/')
    temp_snapshot_fs = TempFS(temp_dir=temp_dir)
    src_path = temp_snapshot_fs.getsyspath('/')
    with self.fs.open(path, 'rb') as source_file:
        with temp_snapshot_fs.open('datafile', 'wb') as temp_file:
            shutil.copyfileobj(source_file, temp_file)
    # snapshot destination directory
    dest_dir = self.snapshot_snap_path(path)
    command = [
        'rdiff-backup', '--parsable-output', '--no-eas',
        '--no-file-statistics', '--no-acls', '--tempdir',
        self.tmp.getsyspath('/'), src_path, dest_dir
    ]
    # Under test, pin monotonically increasing timestamps to speed things up.
    if self.__testing:
        command.insert(5, '--current-time')
        command.insert(6, str(self.__testing['time']))
        self.__testing['time'] += 1
    process = Popen(command, stdout=PIPE, stderr=PIPE)
    stderr = process.communicate()[1]
    ignore = [lambda x: x.startswith("Warning: could not determine case")]
    # Fix: the original used `len(stderr) is not 0`, an identity comparison
    # on an int (works only via CPython small-int caching; SyntaxWarning on
    # Python 3.8+). Truthiness is the correct check for "any stderr output".
    if stderr:
        for rule in ignore:
            if not rule(stderr):
                raise SnapshotError(stderr)
    # close the temp snapshot filesystem
    temp_snapshot_fs.close()
class TestFUSE(unittest.TestCase,FSTestCases,ThreadingTestCases):
    """Run shared FS + threading test cases against a FUSE-mounted TempFS."""

    def setUp(self):
        self.temp_fs = TempFS()
        # "root" holds the real data; "mount" is where FUSE exposes it.
        self.temp_fs.makedir("root")
        self.temp_fs.makedir("mount")
        self.mounted_fs = self.temp_fs.opendir("root")
        self.mount_point = self.temp_fs.getsyspath("mount")
        self.fs = OSFS(self.temp_fs.getsyspath("mount"))
        self.mount_proc = fuse.mount(self.mounted_fs,self.mount_point)

    def tearDown(self):
        self.mount_proc.unmount()
        try:
            self.temp_fs.close()
        except OSError:
            # Sometimes FUSE hangs onto the mountpoint if mount_proc is
            # forcibly killed.  Shell out to fusermount to make sure.
            fuse.unmount(self.mount_point)
            self.temp_fs.close()

    def check(self,p):
        # Assertions inspect the backing store, not the mountpoint.
        return self.mounted_fs.exists(p)
from fs.tempfs import TempFS

# pyvirtualdisplay is optional; tests that need a display skip without it.
try:
    from pyvirtualdisplay import Display
except ImportError:
    Display = None

# Tag the temp root with the current git SHA when running inside a checkout.
try:
    import git
except ImportError:
    SHA = ""
else:
    REPO = git.Repo(search_parent_directories=True)
    SHA = REPO.head.object.hexsha

ROOT_FS = TempFS(identifier=u"fs_filepicker_{}".format(SHA))
ROOT_DIR = ROOT_FS.root_path
TESTDATA_DIR = u'testdata'
SUB_DIRS = [
    fs.path.join(u"testdata", u"foo"),
    fs.path.join(u"testdata", u"bar"),
    fs.path.join(u"testdata", u"empty")
]


def setup_testdata():
    """Create the testdata subdirectories and open the testdata filesystem."""
    for testdir in SUB_DIRS:
        if not ROOT_FS.exists(testdir):
            ROOT_FS.makedirs(testdir)
    # NOTE(review): this chunk appears truncated here; data_fs is presumably
    # used below to populate test files — confirm against the full source.
    data_fs = fs.open_fs(fs.path.join(ROOT_DIR, TESTDATA_DIR))
import fs
from fs.tempfs import TempFS

# Suffix the temp root with a short git SHA when running inside a checkout.
try:
    import git
except ImportError:
    SHA = ""
else:
    repo = git.Repo(os.path.dirname(os.path.realpath(__file__)), search_parent_directories=True)
    SHA = repo.head.object.hexsha[0:10]

CACHED_CONFIG_FILE = None
SERVER_CONFIG_FILE = "mswms_settings.py"
MSCOLAB_CONFIG_FILE = "mscolab_settings.py"

# Per-run temp filesystem holding all MSUI test configuration and data.
ROOT_FS = TempFS(identifier=f"msui{SHA}")
OSFS_URL = ROOT_FS.geturl("", purpose="fs")
ROOT_DIR = ROOT_FS.getsyspath("")
if not ROOT_FS.exists("msui/testdata"):
    ROOT_FS.makedirs("msui/testdata")
SERVER_CONFIG_FS = fs.open_fs(fs.path.join(ROOT_DIR, "msui"))
DATA_FS = fs.open_fs(fs.path.join(ROOT_DIR, "msui/testdata"))

MSUI_CONFIG_PATH = OSFS_URL
# MSUI_CONFIG_PATH = SERVER_CONFIG_FS.getsyspath("") would use a none osfs path
os.environ["MSUI_CONFIG_PATH"] = MSUI_CONFIG_PATH
SERVER_CONFIG_FILE_PATH = fs.path.join(SERVER_CONFIG_FS.getsyspath(""), SERVER_CONFIG_FILE)
def setUp(self):
    """Build a VersioningFS whose backup area lives inside the root TempFS."""
    root = TempFS()
    backup_fs = TempFS(temp_dir=root.getsyspath('/'))
    self.fs = VersioningFS(root, backup=backup_fs, tmp=TempFS())
#write(wrap_prefix(prefix[:-1] + ' ') + wrap_error('max recursion levels reached')) else: print_dir(fs, pathjoin(path, item), levels[:] + [is_last_item]) else: write('%s %s' % (wrap_prefix(prefix + char_line), wrap_filename(item))) return len(dir_listing) print_dir(fs, path) return dircount[0], filecount[0] if __name__ == "__main__": from fs.tempfs import TempFS from six import b t1 = TempFS() t1.setcontents("foo", b("test")) t1.makedir("bar") t1.setcontents("bar/baz", b("another test")) t1.tree() t2 = TempFS() print t2.listdir() movedir(t1, t2) print t2.listdir() t1.tree() t2.tree()
def run_install(self):
    """Resolve, download, and install a package plus its dependencies.

    Returns -1 when the project cannot be loaded (without --force), 0 after a
    --download-only run, or None after a normal install.
    """
    args = self.args
    console = self.console
    installed = []
    install_package = args.package
    # Ask the package index for the best install candidate.
    install_select = package_select = self.call('package.select', package=install_package)
    install_notes = package_select['notes']
    if package_select['version'] is None:
        raise CommandError("no install candidate for '{}', run 'moya-pm list' to see available packages".format(install_package))
    package_name = package_select['name']
    install_version = versioning.Version(package_select['version'])
    filename = package_select['md5']
    download_url = package_select['download']
    package_filename = download_url.rsplit('/', 1)[-1]
    libs = []
    output_fs = fsopendir(args.output)
    force = args.force
    installed_libs = {}
    archive = None
    if not args.download:
        # Load the current project to discover already-installed libraries.
        try:
            application = WSGIApplication(self.location, args.settings, disable_autoreload=True)
            archive = application.archive
            if archive is None:
                console.text('unable to load project, use the --force switch to force installation')
                return -1
        except Exception as e:
            if not args.force:
                console.exception(e)
                console.text('unable to load project, use the --force switch to force installation')
                return -1
        else:
            libs = [(lib.long_name, lib.version, lib.install_location)
                    for lib in archive.libs.values()
                    if lib.long_name == package_name]
            installed_libs = archive.libs.copy()
    if not force:
        # Refuse to downgrade/reinstall unless the appropriate flag is given.
        for name, version, location in libs:
            if name == package_name:
                if version > install_version:
                    if not args.force:
                        raise CommandError("a newer version ({}) is already installed, use --force to force installation".format(version))
                elif install_version == version:
                    if not args.force:
                        raise CommandError("version {} is already installed, use --force to force installation".format(version))
                else:
                    if not args.upgrade:
                        raise CommandError("an older version ({}) is installed, use --upgrade to force upgrade".format(version))
                    force = True
    username = self.settings.get('upload', 'username', None)
    password = self.settings.get('upload', 'password', None)
    if username and password:
        auth = (username, password)
    else:
        auth = None
    install_app = args.app or package_name.split('.')[-1]
    # Resolve the full dependency graph for the requested package.
    packages = dependencies.gather_dependencies(self.rpc,
                                                install_app,
                                                args.mount,
                                                install_package,
                                                console,
                                                no_deps=args.no_deps)
    if not args.no_add:
        for package_name, (app_name, mount, package_select) in packages.items():
            if package_select['version'] is None:
                raise CommandError("no install candidate for required package '{}', run 'moya-pm list {}' to see available packages".format(package_name, package_name))
    # Download every package into a shared temp filesystem, verifying md5s.
    download_temp_fs = TempFS()
    for package_name, (app_name, mount, package_select) in packages.items():
        package_name = package_select['name']
        install_version = versioning.Version(package_select['version'])
        filename = "{}-{}.{}".format(package_name, install_version, package_select['md5'])
        download_url = package_select['download']
        package_filename = download_url.rsplit('/', 1)[-1]
        with download_temp_fs.open(filename, 'wb') as package_file:
            checksum = downloader.download(download_url,
                                           package_file,
                                           console=console,
                                           auth=auth,
                                           verify_ssl=False,
                                           msg="requesting {name}=={version}".format(**package_select))
        if checksum != package_select['md5']:
            raise CommandError("md5 checksum of download doesn't match server! download={}, server={}".format(checksum, package_select['md5']))
        if args.download:
            with fsopendir(args.download) as dest_fs:
                fs.utils.copyfile(download_temp_fs, filename, dest_fs, package_filename)
    if args.download:
        # --download only fetches the archives; no installation occurs.
        return 0
    changed_server_xml = False
    # Unpack each downloaded archive into the output directory and register it.
    for package_name, (app_name, mount, package_select) in packages.items():
        package_name = package_select['name']
        install_version = versioning.Version(package_select['version'])
        filename = "{}-{}.{}".format(package_name, install_version, package_select['md5'])
        download_url = package_select['download']
        package_filename = download_url.rsplit('/', 1)[-1]
        install_location = relativefrom(self.location, pathjoin(self.location, args.output, package_select['name']))
        package_select['location'] = install_location
        with download_temp_fs.open(filename, 'rb') as package_file:
            with ZipFS(package_file, 'r') as package_fs:
                with output_fs.makeopendir(package_select['name']) as lib_fs:
                    #if not lib_fs.isdirempty('/') and not force:
                    #    raise CommandError("install directory is not empty, use --force to erase and overwrite")
                    fs.utils.remove_all(lib_fs, '/')
                    fs.utils.copydir(package_fs, lib_fs)
                    installed.append((package_select, mount))
        if not args.no_add and archive:
            # Wire the new library into the project's server XML.
            server_xml = archive.cfg.get('project', 'startup')
            changed_server_xml = installer.install(project_path=self.location,
                                                   server_xml_location=archive.cfg.get('project', 'location'),
                                                   server_xml=server_xml,
                                                   server_name=application.server_ref,
                                                   lib_path=install_location,
                                                   lib_name=package_name,
                                                   app_name=app_name,
                                                   mount=mount)
    # Report what was installed, any release notes, and any XML edits.
    table = []
    for _package, mount in installed:
        table.append([Cell("{name}=={version}".format(**_package), fg="magenta", bold=True),
                      Cell(_package['location'], fg="blue", bold=True),
                      Cell(mount or '', fg="cyan", bold=True)])
    if table:
        console.table(table, ['package', 'location', 'mount'])
    if install_notes:
        console.table([[install_notes]], ['{} v{} release notes'.format(install_select['name'], install_select['version'])])
    if changed_server_xml:
        console.text("moya-pm modified '{}' -- please check changes".format(server_xml), fg="green", bold="yes")
def test_remove_all(self):
    """Test remove_all function"""
    fs = TempFS()
    fs.setcontents("f1", "file 1")
    fs.setcontents("f2", "file 2")
    fs.setcontents("f3", "file 3")
    fs.makedir("foo/bar", recursive=True)
    fs.setcontents("foo/bar/fruit", "apple")
    fs.setcontents("foo/baz", "baz")
    # remove_all on a directory empties it but keeps the directory itself.
    utils.remove_all(fs, "foo/bar")
    self.assert_(not fs.exists("foo/bar/fruit"))
    self.assert_(fs.exists("foo/bar"))
    self.assert_(fs.exists("foo/baz"))
    # remove_all on the root empties the whole filesystem.
    utils.remove_all(fs, "")
    self.assert_(not fs.exists("foo/bar/fruit"))
    self.assert_(not fs.exists("foo/bar/baz"))
    self.assert_(not fs.exists("foo/baz"))
    self.assert_(not fs.exists("foo"))
    self.assert_(not fs.exists("f1"))
    self.assert_(fs.isdirempty('/'))
def setUp(self):
    """Prepare a CSVExporter writing into a fresh TempFS."""
    TestCase.setUp(self)
    self.fs = TempFS()
    self.exporter = CSVExporter(self.fs, m.HTMLMarkupExporter(self))
def setUp(self):
    """Prepare a SuperMemoQAExporter writing to out.txt in a fresh TempFS."""
    TestCase.setUp(self)
    self.fs = TempFS()
    self.exporter = SuperMemoQAExporter(self.fs)
    self.exporter.index_file = 'out.txt'
class TestSuperMemoQAImport (TestCase):
    """Tests for importing SuperMemo Q&A text files.

    Each test writes a Q:/A: formatted file into a scratch filesystem,
    runs the importer, and inspects the collected content objects
    (``self.cos``), images and sounds.

    NOTE(review): the source under review had line breaks inside
    triple-quoted test data collapsed; they have been reconstructed at
    the Q:/A: line boundaries (as implied by the '\\n'-joined expected
    values asserted below) -- verify against the original file.
    """

    def setUp(self):
        TestCase.setUp(self)
        self.fs = TempFS()
        instance_factory = m.ImportedInstanceFactory(
            self, field_types={'question': 'html', 'answer': 'html'})
        self.importer = SuperMemoQAImporter(
            self.fs, instance_factory, m.HTMLMarkupImporter(self))
        # Collected output: content objects, then media side-channels.
        self.cos = []
        self.images = []
        self.sounds = []

    def test_single_card(self):
        """A single Q/A pair yields one card with unicode fields."""
        self.fs.setcontents('cards.txt', u"Q: question 1\nA: answer 1")
        self.importer()
        assert_equals(1, len(self.cos))
        assert_equals(unicode, type(self.cos[0]['question']))
        assert_equals(unicode, type(self.cos[0]['answer']))
        assert_equals(u"question 1", self.cos[0]['question'])
        assert_equals(u"answer 1", self.cos[0]['answer'])

    def test_windows_line_endings(self):
        """CRLF-terminated input parses the same as LF input."""
        self.fs.setcontents('cards.txt', u"Q: question 1\r\nA: answer 1")
        self.importer()
        assert_equals(1, len(self.cos))
        assert_equals(u"question 1", self.cos[0]['question'])
        assert_equals(u"answer 1", self.cos[0]['answer'])

    def test_multiple_cards(self):
        """Several Q/A pairs in one file produce one card each."""
        data = (u"Q: question\n"
                u"A: answer\n"
                u"Q: question 2\n"
                u"A: answer 2")
        self.fs.setcontents('cards.txt', data)
        self.importer()
        assert_equals(2, len(self.cos))
        assert_equals(u"question", self.cos[0]['question'])
        assert_equals(u"answer", self.cos[0]['answer'])
        assert_equals(u"question 2", self.cos[1]['question'])
        assert_equals(u"answer 2", self.cos[1]['answer'])

    def test_content_is_right_trimmed(self):
        """Trailing whitespace on Q/A lines is stripped."""
        self.fs.setcontents('cards.txt', u"Q: question \nA: answer \n")
        self.importer()
        assert_equals(1, len(self.cos))
        assert_equals(u"question", self.cos[0]['question'])
        assert_equals(u"answer", self.cos[0]['answer'])

    def test_multiline_question_and_answer(self):
        """Consecutive Q: (or A:) lines merge into one multi-line field."""
        data = (u"Q: question\n"
                u"Q: end of question\n"
                u"A: answer\n"
                u"A: end of answer")
        self.fs.setcontents('cards.txt', data)
        self.importer()
        assert_equals(1, len(self.cos))
        assert_equals(u"question\nend of question", self.cos[0]['question'])
        assert_equals(u"answer\nend of answer", self.cos[0]['answer'])

    def test_multiline_question_and_answer_lines_are_rtrimmed(self):
        """Each merged line is right-trimmed individually."""
        data = ("Q: question \nQ: end of question \n"
                "A: answer \nA: end of answer ")
        self.fs.setcontents('cards.txt', data)
        self.importer()
        assert_equals(1, len(self.cos))
        assert_equals(u"question\nend of question", self.cos[0]['question'])
        assert_equals(u"answer\nend of answer", self.cos[0]['answer'])

    def test_custom_encoding(self):
        """A non-UTF-8 encoding can be selected via importer.encoding."""
        data = (u"Q: by\u0107 szczerym\nA: to be frank").encode('cp1250')
        self.fs.setcontents('cards.txt', data)
        self.importer.encoding = 'cp1250'
        self.importer()
        assert_equals(1, len(self.cos))
        assert_equals(unicode, type(self.cos[0]['question']))
        assert_equals(unicode, type(self.cos[0]['answer']))
        assert_equals(u"by\u0107 szczerym", self.cos[0]['question'])
        assert_equals(u"to be frank", self.cos[0]['answer'])

    def test_html_tags_are_preserved(self):
        """Inline HTML markup survives the import untouched."""
        data = (u"Q: hist: When did we <b>land on the moon</b>?\n"
                u"A: 1969 <i>(July 20)</i>")
        self.fs.setcontents('cards.txt', data)
        self.importer()
        assert_equals(u"hist: When did we <b>land on the moon</b>?",
                      self.cos[0]['question'])
        assert_equals(u"1969 <i>(July 20)</i>", self.cos[0]['answer'])

    def test_card_with_image(self):
        """Referenced images are collected and their src rewritten."""
        data = u'Q: <img src="image.jpg" />\nA: answer'
        self.fs.setcontents('index.txt', data)
        image_data = self.data.getcontents('small.jpg')
        self.fs.setcontents('image.jpg', image_data)
        self.importer()
        assert_equals(1, len(self.cos))
        assert_equals(u'<img src="/images/image.jpg"/>',
                      self.cos[0]['question'])
        assert_equals(u"answer", self.cos[0]['answer'])
        assert_equals(1, len(self.images))
        assert_equals('image.jpg', self.images[0]['filename'])
        assert_equals('image/jpeg', self.images[0]['mime_type'])
        assert_true(image_data == self.images[0]['data'])

    def test_card_with_index_in_subdirectory_and_image(self):
        """Image paths resolve relative to the index file's directory."""
        data = u'Q: <img src="image.jpg" />\nA: answer'
        self.fs.makedir('dir')
        self.fs.setcontents('dir/index.txt', data)
        image_data = self.data.getcontents('small.jpg')
        self.fs.setcontents('dir/image.jpg', image_data)
        self.importer()
        assert_equals(1, len(self.cos))
        assert_equals(u'<img src="/images/image.jpg"/>',
                      self.cos[0]['question'])
        assert_equals(u"answer", self.cos[0]['answer'])
        assert_equals(1, len(self.images))
        assert_equals('image.jpg', self.images[0]['filename'])
        assert_equals('image/jpeg', self.images[0]['mime_type'])
        assert_true(image_data == self.images[0]['data'])

    def test_card_with_audio(self):
        """Referenced audio files are collected and their href rewritten."""
        data = (u'Q: <span class="audio autoplay">'
                u'<a href="button.mp3" /></span>\nA: answer')
        self.fs.setcontents('index.txt', data)
        sound_data = self.data.getcontents('button.mp3')
        self.fs.setcontents('button.mp3', sound_data)
        self.importer()
        assert_equals(1, len(self.cos))
        assert_equals(u'<span class="audio autoplay">'
                      u'<a href="/sounds/button.mp3"/></span>',
                      self.cos[0]['question'])
        assert_equals(u"answer", self.cos[0]['answer'])
        assert_equals(1, len(self.sounds))
        assert_equals('button.mp3', self.sounds[0]['filename'])
        assert_equals('audio/mpeg', self.sounds[0]['mime_type'])
        assert_true(sound_data == self.sounds[0]['data'])

    def test_byte_order_mark_in_utf8_files_is_removed(self):
        """A UTF-8 BOM does not leak into the first field's content."""
        data = u'\ufeffQ: \uac00\uac8c\r\nA: store'
        self.fs.setcontents('index.txt', data.encode('utf8'))
        self.importer()
        assert_equals(1, len(self.cos))
        assert_equals(u"\uac00\uac8c", self.cos[0]['question'])
        assert_equals(u"store", self.cos[0]['answer'])

    def test_invalid_xml_results_in_input_error(self):
        """Malformed markup in a field raises ConversionFailure."""
        self.fs.setcontents('index.txt', u'Q: <b>question\nA: answer')
        assert_raises(ConversionFailure, self.importer)

    def test_invalid_fields_number_in_input_error(self):
        """A question with no answer raises ConversionFailure."""
        self.fs.setcontents('index.txt', u'Q: question')
        assert_raises(ConversionFailure, self.importer)
else: print_dir(fs, pathjoin(path, item), levels[:] + [is_last_item]) else: write('%s %s' % (wrap_prefix(prefix + char_line), wrap_filename(item))) return len(dir_listing) print_dir(fs, path) return dircount[0], filecount[0] if __name__ == "__main__": from fs.tempfs import TempFS from six import b t1 = TempFS() t1.setcontents("foo", b("test")) t1.makedir("bar") t1.setcontents("bar/baz", b("another test")) t1.tree() t2 = TempFS() print t2.listdir() movedir(t1, t2) print t2.listdir() t1.tree() t2.tree()
successful = [] def ready_callback(): successful.append(True) os.write(w, "S") os.close(w) opts["ready_callback"] = ready_callback try: mount(fs, path, **opts) except Exception: pass if not successful: os.write(w, "E") if __name__ == "__main__": import os, os.path from fs.tempfs import TempFS mount_point = os.path.join(os.environ["HOME"], "fs.expose.fuse") if not os.path.exists(mount_point): os.makedirs(mount_point) def ready_callback(): print "READY" mount(TempFS(), mount_point, foreground=True, ready_callback=ready_callback)
def test_os_fs_create_dir(self):
    """os_fs_create_dir succeeds when the target directory already exists.

    TempFS has already created the directory on disk, so this exercises
    the "already exists" path of ``os_fs_create_dir``.
    """
    _fs = TempFS(identifier="mss")
    try:
        _dir = _fs.getsyspath("")
        os_fs_create_dir(_dir)
        assert os.path.exists(_dir)
    finally:
        # Fix: the TempFS was previously leaked; release the backing
        # temporary directory once the assertion has run.
        _fs.close()
def setUp(self):
    """Build a CacheFS over a fresh TempFS with a very short cache timeout.

    The interpreter check interval is lowered so thread switches happen
    frequently during these tests; the previous value is saved on the
    instance, presumably so it can be restored later (tearDown is not
    shown here -- verify).
    """
    self._check_interval = sys.getcheckinterval()
    sys.setcheckinterval(10)
    backing = TempFS()
    self.wrapped_fs = backing
    self.fs = CacheFS(backing, cache_timeout=0.01)
def setUp(self):
    """Create a scratch filesystem and a fresh Importer for each test."""
    TestCase.setUp(self)
    self.fs = TempFS()
    self.importer = Importer()