def test_nonexistent_location(fs_loader):
    """Loading a missing path must fail; saving to it must create it."""
    path = "./folder/subfolder/new-eopatch/"
    patch = EOPatch()

    # Relative path against an abstract filesystem object.
    with fs_loader() as temp_fs:
        with pytest.raises(ResourceNotFound):
            EOPatch.load(path, filesystem=temp_fs)
        patch.save(path, filesystem=temp_fs)

    # Absolute system path: opening the filesystem itself fails for a
    # nonexistent location, both directly and through LoadTask.
    with TempFS() as temp_fs:
        full_path = os.path.join(temp_fs.root_path, path)
        with pytest.raises(CreateFailed):
            EOPatch.load(full_path)
        load_task = LoadTask(full_path)
        with pytest.raises(CreateFailed):
            load_task.execute()
        patch.save(full_path)
        assert os.path.exists(full_path)

    # SaveTask is expected to create the missing directory tree itself.
    with TempFS() as temp_fs:
        full_path = os.path.join(temp_fs.root_path, path)
        save_task = SaveTask(full_path)
        save_task.execute(patch)
        assert os.path.exists(full_path)
def test_use_local_filesystem_as_transfer_area(self):
    """A local directory tree can serve as a transfer area for CrashPlanFS."""
    log_file = self.get_resource('crashplan_backup_files.log')

    # Create a directory tree that can be used as a transfer area
    with TempFS() as transfer_area:
        transfer_area.makedirs(u'/my/crashplan/backups')
        target = u'/my/crashplan/backups/foo.txt'
        with CrashPlanFS(log_file=log_file.strpath,
                         _local_fs_root=transfer_area.root_path) as plan_fs:
            assert not transfer_area.exists(target)
            assert not plan_fs.exists(target)
            plan_fs.touch(target)
            # The write passed through to the local transfer area.
            assert transfer_area.exists(target)

    # Create a directory tree that cannot be mapped to the remote directory
    with TempFS() as transfer_area:
        transfer_area.makedirs(u'/unmapped/crashplan/backups')
        target = u'/my/crashplan/backups/foo.txt'
        with CrashPlanFS(log_file=log_file.strpath,
                         _local_fs_root=transfer_area.root_path) as plan_fs:
            assert not transfer_area.exists(target)
            assert not plan_fs.exists(target)
            plan_fs.touch(target)
            # Unmappable tree: the write must not land in the transfer area.
            assert not transfer_area.exists(target)
def setUp(self):
    """Build a VersioningFS whose backup area lives inside the root tree."""
    root_filesystem = TempFS()
    backup_filesystem = TempFS(temp_dir=root_filesystem.getsyspath('/'))
    self.fs = VersioningFS(
        root_filesystem,
        backup=backup_filesystem,
        tmp=TempFS(),
        testing={'time': 1},
    )
def test_nonexistent_location(self):
    """Loading a missing path must fail; saving to it must create it."""
    path = './folder/subfolder/new-eopatch/'
    patch = EOPatch()

    # Relative path against each configured abstract filesystem loader.
    for fs_loader in self.filesystem_loaders:
        with fs_loader() as temp_fs:
            with self.assertRaises(ResourceNotFound):
                EOPatch.load(path, filesystem=temp_fs)
            patch.save(path, filesystem=temp_fs)

    # Absolute system path: opening the filesystem fails for a missing
    # location, both directly and through LoadTask.
    with TempFS() as temp_fs:
        full_path = os.path.join(temp_fs.root_path, path)
        with self.assertRaises(CreateFailed):
            EOPatch.load(full_path)
        load_task = LoadTask(full_path)
        with self.assertRaises(CreateFailed):
            load_task.execute()
        patch.save(full_path)
        self.assertTrue(os.path.exists(full_path))

    # SaveTask is expected to create the missing directory tree itself.
    with TempFS() as temp_fs:
        full_path = os.path.join(temp_fs.root_path, path)
        save_task = SaveTask(full_path)
        save_task.execute(patch)
        self.assertTrue(os.path.exists(full_path))
def test_copydir_indir(self):
    """Test copydir in a directory"""
    # Behaviour must be the same for an in-memory and an on-disk backend.
    for fs_factory in (MemoryFS, TempFS):
        src = fs_factory()
        dst = fs_factory()
        self._make_fs(src)
        utils.copydir(src, (dst, "copy"))
        self._check_fs(dst.opendir("copy"))
def test_copydir_root(self):
    """Test copydir from root"""
    # Behaviour must be the same for an in-memory and an on-disk backend.
    for fs_factory in (MemoryFS, TempFS):
        src = fs_factory()
        self._make_fs(src)
        dst = fs_factory()
        utils.copydir(src, dst)
        self._check_fs(dst)
def test_listdir(self):
    """Mounted filesystems appear as top-level directory entries."""
    mount_fs = MountFS()
    self.assertEqual(mount_fs.listdir('/'), [])

    m1, m2, m3, m4 = MemoryFS(), TempFS(), MemoryFS(), TempFS()
    mount_fs.mount('/m1', m1)
    mount_fs.mount('/m2', m2)
    mount_fs.mount('/m3', m3)

    # Mounting underneath an existing mount point is rejected.
    with self.assertRaises(MountError):
        mount_fs.mount('/m3/foo', m4)

    self.assertEqual(sorted(mount_fs.listdir('/')), ['m1', 'm2', 'm3'])
    # Contents created on the backing fs show through the mount.
    m3.makedir('foo')
    self.assertEqual(sorted(mount_fs.listdir('/m3')), ['foo'])
def tmp_dir(self) -> TempFS:
    """
    The temporary directory used to download modules before
    installing them if needed.  Created lazily on first access.
    """
    if self._tmp_dir:
        return self._tmp_dir
    self._tmp_dir = TempFS(TEMP_DIR_NAME)
    return self._tmp_dir
def test_validator_entrypoint_bad_directory_structure(report_v2_json, param_json):
    """A report whose entrypoint does not match the directory layout fails validation."""
    tmp_filesystem = TempFS()
    tmp_filesystem.create('readme.md')
    renderer = RendererDefinition(
        root_path=tmp_filesystem.root_path,
        id='321',
        type='json',
        description='JSON Renderer',
        default=True,
    )
    parameter = ParameterDefinition(**param_json())
    report_dict = report_v2_json(
        readme_file='readme.md',
        renderers=[renderer],
        parameters=[parameter],
    )
    report = ReportDefinition(
        root_path=tmp_filesystem.root_path,
        **report_dict,
    )

    errors = _validate_report(report)

    assert len(errors) != 0
    assert 'directory structure does not match' in errors[0]
def test_render(account_factory, report_factory, report_data, extra_context):
    """BaseRenderer.render zips the generated report together with a summary."""
    class DummyRenderer(BaseRenderer):
        # Minimal renderer: dumps str(data) to '<output>.ext'.
        def generate_report(self, data, output_file):
            output_file = f'{output_file}.ext'
            open(output_file, 'w').write(str(data))
            return output_file

    tmp_fs = TempFS()
    rows = report_data(2, 2)
    renderer = DummyRenderer(
        'runtime',
        tmp_fs.root_path,
        account_factory(),
        report_factory(),
    )
    renderer.set_extra_context(extra_context)
    ctx = renderer.get_context(rows)

    output_file = renderer.render(rows, f'{tmp_fs.root_path}/report')
    assert output_file == f'{tmp_fs.root_path}/report.zip'

    with ZipFile(output_file) as repzip:
        assert sorted(repzip.namelist()) == ['report.ext', 'summary.json']
        with repzip.open('report.ext') as repfile:
            assert repfile.read().decode('utf-8') == str(rows)

    if extra_context:
        assert 'name' in ctx['extra_context']
        assert 'desc' in ctx['extra_context']
def test_render_tmpfs_ok(report_data, account_factory, report_factory):
    """PDFRenderer renders a Jinja template from a TempFS into a zipped PDF."""
    tmp_fs = TempFS()
    tmp_fs.makedirs('package/report')
    with tmp_fs.open('package/report/template.html.j2', 'w') as fp:
        fp.write('''
            <html>
            <head><title>PDF Report</title></head>
            <body>
            <ul>
            {% for item in data %}
            <li>{{item[0]}} {{item[1]}}</li>
            {% endfor %}
            </ul>
            </body>
            </html>
        ''')

    renderer = PDFRenderer(
        'runtime',
        tmp_fs.root_path,
        account_factory(),
        report_factory(),
        template='package/report/template.html.j2',
    )
    data = report_data(2, 2)
    path_to_output = f'{tmp_fs.root_path}/package/report/report'

    output_file = renderer.render(data, path_to_output)

    assert output_file == f'{path_to_output}.zip'
    with ZipFile(output_file) as zip_file:
        assert sorted(zip_file.namelist()) == ['report.pdf', 'summary.json']
        with zip_file.open('report.pdf', 'r') as fp:
            # The rendered PDF embeds the template's title text.
            assert 'PDF Report' in str(fp.read())
def setUp(self) -> None:
    """Build a Multiversioned with two bundle versions and a view on each."""
    # Build a working bundle containing a collection; both include
    # a set of files, not all on the same level.
    b_lidvid = LIDVID("urn:nasa:pds:b::1.2")
    b_files = {
        "foo.txt": "Hello, world!",
        "counter.txt": "12345",
        "subdir": {"subdir.txt": "xxx"},
    }
    c_lidvid = LIDVID("urn:nasa:pds:b:c::1.1")
    c_files = {
        "undersea.txt": "I'm under c!",
        "deeper": {"leagues40k.txt": "Captain Nemo"},
    }
    self.tempfs = TempFS()
    mv = Multiversioned(self.tempfs)
    mv[b_lidvid] = dictionary_to_contents({c_lidvid}, b_files)
    mv[c_lidvid] = dictionary_to_contents(set(), c_files)

    # Add a second version of the bundle containing nothing, just
    # to check that they stay independent.
    b2_lidvid = LIDVID("urn:nasa:pds:b::2.0")
    b2_files: Dict[Any, Any] = {}
    mv[b2_lidvid] = dictionary_to_contents(set(), b2_files)

    self.vv = VersionView(mv, b_lidvid)
    self.vv2 = VersionView(mv, b2_lidvid)
    self.mv = mv
def test_remove_dir_multi_versions_remove(api, auth1):
    """Deleting a versioned archive removes every version from all stores."""
    cache = TempFS()
    api.attach_authority('auth1', auth1)
    api.attach_cache(cache)

    with open('test_file.txt', 'w+') as f:
        f.write(u'this is an upload test')

    archive = api.create('archive1', authority_name='auth1', versioned=True)
    archive.update('test_file.txt', cache=True)
    with archive.open('w') as f:
        f.write(u'update update')
    # Two writes -> two recorded versions.
    assert len(archive.get_versions()) == 2

    archive.delete()
    with pytest.raises(KeyError):
        api.get_archive('archive1')
    with pytest.raises(ResourceNotFoundError):
        api._authorities['auth1'].fs.open('archive1', 'r')
    with pytest.raises(ResourceNotFoundError):
        api.cache.fs.open('archive1', 'r')
    assert api.listdir('', authority_name='auth1') == []
def test_delete_handling(api, auth1):
    """After deletion an archive is gone from the authority, cache and catalog.

    Fix: the original bound the result of ``fs.open`` to ``f`` inside
    ``pytest.raises`` blocks; the binding is unreachable (the call raises),
    so the dead assignments are removed.
    """
    cache = TempFS()
    api.attach_authority('auth1', auth1)
    api.attach_cache(cache)

    with open('test_file.txt', 'w+') as f:
        f.write(u'this is an upload test')

    var = api.create('archive1', authority_name='auth1', versioned=False)
    var.update('test_file.txt', cache=True)
    assert os.path.isfile(api.cache.fs.getsyspath('archive1'))

    # ``remove=True`` consumes the local source file.
    var.update('test_file.txt', remove=True)
    assert not os.path.isfile('test_file.txt')

    var.delete()
    with pytest.raises(KeyError):
        api.get_archive('archive1')
    with pytest.raises(ResourceNotFoundError):
        api._authorities['auth1'].fs.open('archive1', 'r')
    with pytest.raises(ResourceNotFoundError):
        api.cache.fs.open('archive1', 'r')
def test_remove(self):
    """File.remove deletes the underlying filesystem entry."""
    with TempFS() as tfs:
        tfs.touch('test')
        file_obj = File('test', tfs)
        self.assertTrue(tfs.exists('test'))
        file_obj.remove()
        self.assertFalse(tfs.exists('test'))
def test_render_tmpfs_ok(account_factory, report_factory, report_data):
    """XLSXRenderer fills the template workbook with the report data."""
    tmp_fs = TempFS()
    tmp_fs.makedirs('package/report')

    # Build a minimal template workbook with a header row.
    template_wb = Workbook()
    sheet = template_wb.active
    sheet.title = 'Data'
    sheet.cell(1, 1, value='Name')
    sheet.cell(1, 2, value='Description')
    template_wb.save(f'{tmp_fs.root_path}/package/report/template.xlsx')

    renderer = XLSXRenderer(
        'runtime',
        tmp_fs.root_path,
        account_factory(),
        report_factory(),
        template='package/report/template.xlsx',
    )
    data = report_data(2, 2)
    path_to_output = f'{tmp_fs.root_path}/package/report/report'

    output_file = renderer.render(data, path_to_output, start_time=datetime.now())

    rendered_wb = load_workbook(output_file)
    rendered_sheet = rendered_wb['Data']
    assert output_file == f'{path_to_output}.xlsx'
    # Data rows start at row 2, below the header.
    assert data == [
        [rendered_sheet[f'A{row}'].value, rendered_sheet[f'B{row}'].value]
        for row in range(2, 4)
    ]
def test_remove(self):
    """Directory.remove deletes the underlying filesystem entry."""
    with TempFS() as tfs:
        tfs.makedir('test')
        dir_obj = Directory('test', tfs)
        self.assertTrue(tfs.exists('test'))
        dir_obj.remove()
        self.assertFalse(tfs.exists('test'))
def download_data_networks():
    """Serve a GML dump of the requested wiki network as a file download.

    Fix: the original guard was ``if 'lower_bound' and 'upper_bound' in
    selection.keys():`` which Python parses as ``'lower_bound' and
    ('upper_bound' in ...)`` -- the string literal is always truthy, so
    only 'upper_bound' was ever checked. Both keys are now tested.
    """
    selection = parse_qs(decode(request.query_string))
    print('Received this selection to download: {}'.format(selection))
    if not is_valid(selection):
        return 'Nothing to download!'
    wikis = extract_wikis_from_selection_dict(selection)
    network_code = selection['network'][0]
    lower_bound = ''
    upper_bound = ''
    if 'lower_bound' in selection and 'upper_bound' in selection:
        lower_bound = int(selection['lower_bound'][0])
        upper_bound = int(selection['upper_bound'][0])
        # Timestamps arrive as epoch seconds; the controller expects strings.
        upper_bound = datetime.fromtimestamp(upper_bound).strftime(
            "%Y-%m-%d %H:%M:%S")
        lower_bound = datetime.fromtimestamp(lower_bound).strftime(
            "%Y-%m-%d %H:%M:%S")
    network = data_controller.get_network(wikis[0], network_code,
                                          lower_bound, upper_bound)
    # NOTE(review): the TempFS is deliberately not closed here -- the file
    # must outlive this function so send_file can stream it. Consider an
    # after-request cleanup hook to reclaim the directory.
    tmp = TempFS()
    tmp.create('network.gml')
    path = tmp.getsyspath('/network.gml')
    network.write_gml(file=path)
    return send_file(filename_or_fp=path, as_attachment=True,
                     attachment_filename='network.gml')
def test_loader_methods(self):
    """FSImportHook finds and loads modules and packages from a filesystem."""
    tmp_fs = TempFS()
    self._init_modules(tmp_fs)
    hook = FSImportHook(tmp_fs)
    sys.meta_path.append(hook)
    try:
        # Present names resolve to the hook itself; absent ones to None.
        self.assertEqual(hook.find_module("fsih_hello"), hook)
        self.assertEqual(hook.find_module("fsih_helo"), None)
        self.assertEqual(hook.find_module("fsih_pkg"), hook)
        self.assertEqual(hook.find_module("fsih_pkg.sub1"), hook)
        self.assertEqual(hook.find_module("fsih_pkg.sub2"), hook)
        self.assertEqual(hook.find_module("fsih_pkg.sub3"), None)

        module = hook.load_module("fsih_hello")
        self.assertEqual(module.message, "hello world!")
        self.assertRaises(ImportError, hook.load_module, "fsih_helo")

        # Package must be loaded before its submodules.
        hook.load_module("fsih_pkg")
        module = hook.load_module("fsih_pkg.sub1")
        self.assertEqual(module.message, "hello world!")
        self.assertEqual(module.a, 42)
        module = hook.load_module("fsih_pkg.sub2")
        self.assertEqual(module.message, "hello world!")
        self.assertEqual(module.a, 42 * 2)
        self.assertRaises(ImportError, hook.load_module, "fsih_pkg.sub3")
    finally:
        sys.meta_path.remove(hook)
        tmp_fs.close()
def install_packages(self, output_fs, selected_packages, application=None):
    """Install packages

    Downloads each selected package into a scratch filesystem, installs
    them into ``output_fs``, prints a summary table, and warns when the
    project's server XML was modified by the install.
    """
    # Scratch filesystem holding the downloaded package archives.
    download_fs = TempFS()
    install_packages = []
    for index, (_, select_package) in enumerate(selected_packages):
        # App name comes from --app, or from the last dotted component of
        # the package name; only the FIRST package receives app/mount args.
        app_name = self.args.app or select_package["name"].split(
            ".", 1)[-1].replace(".", "")
        _install = self.download_package(
            download_fs,
            select_package,
            app=app_name if index == 0 else None,
            mount=self.args.mount if index == 0 else None,
        )
        install_packages.append(_install)
    installed = []
    # Configuration comes from the running application when given,
    # otherwise it is read from disk.
    if application:
        cfg = application.archive.cfg
    else:
        cfg = build.read_config(self.location, self.args.settings)
    changed_server = False
    for _package in install_packages:
        _changed_server, _installed_packages = self.install_package(
            download_fs, output_fs, _package, cfg=cfg)
        installed.extend(_installed_packages)
        # Sticky flag: true if ANY install touched the server config.
        changed_server = changed_server or _changed_server
    # Build and print a summary table of installed packages and mounts.
    table = []
    for _package, mount in installed:
        table.append([
            Cell("{name}".format(**_package), fg="magenta", bold=True),
            Cell("{version}".format(**_package)),
            Cell(_package["location"], fg="blue", bold=True),
            Cell(mount or "", fg="cyan", bold=True),
        ])
    if table:
        self.console.table(table, ["package", "version", "location", "mount"])
    if application is not None:
        archive = application.archive
        logic_location = archive.cfg.get("project", "location")
        server_xml = archive.cfg.get("project", "startup")
        server_xml = archive.project_fs.getsyspath(
            join(logic_location, server_xml))
        # Tell the user to review the modified server XML.
        if changed_server:
            self.console.text(
                "moya-pm modified '{}' -- please check changes".format(
                    server_xml),
                fg="green",
                bold="yes",
            )
def test_movedir_root(self):
    """Test movedir to root dir

    Fix: replaces the deprecated ``self.assert_`` alias (removed in
    Python 3.12) with ``assertFalse``.
    """
    # Behaviour must be the same for an in-memory and an on-disk backend.
    for fs_factory in (MemoryFS, TempFS):
        fs1 = fs_factory()
        fs2 = fs_factory()
        fs1sub = fs1.makeopendir("from")
        self._make_fs(fs1sub)
        utils.movedir((fs1, "from"), fs2)
        self.assertFalse(fs1.exists("from"))
        self._check_fs(fs2)
def test_movedir_indir(self):
    """Test movedir in a directory

    Fix: replaces the deprecated ``self.assert_`` alias (removed in
    Python 3.12) with ``assertFalse``.
    """
    # Behaviour must be the same for an in-memory and an on-disk backend.
    for fs_factory in (MemoryFS, TempFS):
        fs1 = fs_factory()
        fs2 = fs_factory()
        fs1sub = fs1.makeopendir("from")
        self._make_fs(fs1sub)
        utils.movedir((fs1, "from"), (fs2, "copy"))
        self.assertFalse(fs1.exists("from"))
        self._check_fs(fs2.opendir("copy"))
def __init__(self, dfvfs, partition, windows_system):
    """Collect the state needed to open registry files from a dfVFS image.

    :param dfvfs: dfVFS helper used to access the disk image.
    :param partition: partition to read registry files from.
    :param windows_system: flag/descriptor of the Windows system variant.
    """
    super(RegistryFileOpener, self).__init__()
    self.dfvfs = dfvfs
    self.partition = partition
    # Paths already determined to be absent, so they are not probed twice.
    self.not_present = set()
    # Handles given out so far, kept for later cleanup.
    self.open_handles = []
    # Scratch area for extracted file copies.
    self.tmpfs = TempFS()
    self.windows_system = windows_system
def setUp(self):
    """Mount a TempFS-backed directory over FUSE for the tests."""
    self.temp_fs = TempFS()
    for name in ("root", "mount"):
        self.temp_fs.makedir(name)
    # 'root' holds the data to expose; 'mount' is the FUSE mount point.
    self.mounted_fs = self.temp_fs.opendir("root")
    self.mount_point = self.temp_fs.getsyspath("mount")
    self.fs = OSFS(self.temp_fs.getsyspath("mount"))
    self.mount_proc = fuse.mount(self.mounted_fs, self.mount_point)
def upload_docs(self, lib_name, lib_version):
    """Build the documentation for a library and upload it as a zip package.

    Fixes: (1) the OS-level file descriptor returned by ``tempfile.mkstemp``
    was leaked -- it is now closed immediately, since only the path is used;
    (2) the trailing ``if result == "success": pass / else`` block was
    unreachable dead code (the failure path raises above) and was removed.
    """
    import os

    args = self.args
    archive, lib = build.build_lib(args.location, ignore_errors=True)
    lib_name = lib.long_name
    from ..docgen.extracter import Extracter

    # Extract the docs into a scratch filesystem, then zip them up.
    extract_fs = TempFS('moyadoc-{}'.format(lib_name))
    extracter = Extracter(archive, extract_fs)
    extracter.extract_lib(lib_name)

    _fh, temp_filename = tempfile.mkstemp('moyadocs')
    os.close(_fh)  # only the path is needed; keep the fd from leaking
    with ZipFS(temp_filename, 'w') as docs_zip_fs:
        fs.copy.copy_dir(extract_fs, '/', docs_zip_fs, '/')

    package_filename = "{}-{}.docs.zip".format(lib_name, lib_version)
    upload_info = self.call('package.get-upload-info')
    docs_url = upload_info['docs_url']

    self.console("uploading '{}'...".format(package_filename)).nl()
    with io.open(temp_filename, 'rb') as package_file:
        files = [('file', (package_filename, package_file,
                           'application/octet-stream'))]
        data = {
            "auth": self.auth_token,
            "package": lib_name,
            "version": lib_version
        }
        response = requests.post(docs_url,
                                 verify=False,
                                 files=files,
                                 data=data,
                                 hooks={})
    if response.status_code != 200:
        raise CommandError(
            "upload failed -- server returned {} response".format(
                response.status_code))
    message = decode_utf8_bytes(
        response.headers.get('moya-upload-package-message', ''))
    result = decode_utf8_bytes(
        response.headers.get('moya-upload-package-result', ''))
    if result == 'success':
        self.server_response(message, fg="green")
    else:
        raise CommandError('upload error ({})'.format(message))
def test_bushy_pyfs(test_file):
    """BushyStorage behaves identically on in-memory and on-disk backends."""
    from fs.memoryfs import MemoryFS
    from fs.tempfs import TempFS
    from fs.errors import ResourceNotFound
    from roughrider.storage.pyfs import PyFSStorage
    from roughrider.storage.meta import FileInfo

    class BushyStorage(PyFSStorage):
        # Deterministic ticket sequence: UUIDs built from a counter.
        count = None

        def generate_ticket(self) -> str:
            if self.count is None:
                self.count = 0
            self.count += 1
            return str(uuid.UUID(int=self.count))

        def ticket_to_uri(self, uid: str) -> Path:
            # Bushy layout: split the ticket into nested path segments.
            return Path(f'{uid[0:4]}/{uid[4:8]}/{uid[9:]}')

    for backend in (MemoryFS(), TempFS()):
        storage = BushyStorage('bushy', fs=backend)
        storage_info = storage.store(test_file)
        assert storage_info == FileInfo(
            namespace='bushy',
            ticket='00000000-0000-0000-0000-000000000001',
            size=28,
            checksum=('md5', '53195454e1210adae36ecb34453a1f5a'),
            metadata={})
        iterator = storage.retrieve(storage_info['ticket'])
        assert isinstance(iterator, Iterator)
        test_file.seek(0)
        assert b''.join(iterator) == test_file.read()
        test_file.seek(0)
        storage.delete(storage_info['ticket'])
        # A second delete of the same ticket must fail.
        with pytest.raises(ResourceNotFound):
            storage.delete(storage_info['ticket'])
def create_version_view(self, lid: LID) -> VersionView:
    """Return a read-only view of the latest version of *lid*."""
    lidvid = self.latest_lidvid(lid)
    if lidvid is not None:
        return VersionView(self, lidvid)
    # It's only read, not written to, and the Multiversioned is empty
    # (at least for this bundle).  We can return anything that's empty, so:
    return cast(VersionView, TempFS())
def test_validator_repo_duplicated_reports(mocker, param_json):
    """Two reports sharing one entrypoint module are rejected as duplicates."""
    mocker.patch(
        'connect.reports.validator._validate_report',
        return_value=[],
    )

    def make_report_dict():
        # Fresh dict (and fresh param_json() call) per report, matching
        # the duplicated-literal layout this test exercises.
        return {
            'name': 'Report',
            'readme_file': 'readme.md',
            'entrypoint': 'reports.report_package.entrypoint',
            'audience': ['vendor', 'provider'],
            'parameters': [param_json()],
            'report_spec': '2',
        }

    csv_renderer = RendererDefinition(
        root_path='root_path',
        id='123',
        type='csv',
        description='CSV Renderer',
        default=True,
    )
    report_1 = ReportDefinition(
        root_path='root_path',
        **make_report_dict(),
        renderers=[csv_renderer],
    )
    report_2 = ReportDefinition(
        root_path='root_path',
        **make_report_dict(),
        renderers=[csv_renderer],
    )

    tmp_filesystem = TempFS()
    tmp_filesystem.create('readme.md')
    repo_dict = {
        'name': 'Reports Repository',
        'readme_file': 'readme.md',
        'version': '1.0.0',
        'language': 'python',
        'reports': [report_1, report_2],
    }
    repo = RepositoryDefinition(
        root_path=tmp_filesystem.root_path,
        **repo_dict,
    )

    errors = validate(repo)

    assert len(errors) != 0
    assert 'Multiple reports within single module found' in errors[0]
def test_repository_tempfs_definition_description(repo_json):
    """The repository description is read from the readme file on disk."""
    expected_descr = 'This is the repository markdown description'
    tmp_fs = TempFS()
    with tmp_fs.open('readme.md', 'w') as readme:
        readme.write(expected_descr)
    repo_data = repo_json(readme_file='readme.md')
    definition = RepositoryDefinition(root_path=tmp_fs.root_path, **repo_data)
    assert definition.description == expected_descr
def test_importer_on_meta_path(self):
    """Imports succeed while the hook is installed on sys.meta_path."""
    tmp_fs = TempFS()
    self._init_modules(tmp_fs)
    hook = FSImportHook(tmp_fs)
    sys.meta_path.append(hook)
    try:
        self._check_imports_are_working()
    finally:
        # Always uninstall the hook and release the temp filesystem.
        sys.meta_path.remove(hook)
        tmp_fs.close()