def test_not_recursive(self):
    """Non-recursive indexing of tests/src/blog yields exactly the four known posts.

    Materializes the index once (it may be a generator, which would be
    exhausted by a len() check) and compares it entry-by-entry against the
    expected front-matter dictionaries.
    """
    # BUG FIX: the original consumed the generator with len(list(...)) and
    # then called generate_index() a second time; materialize once instead.
    outputs = list(generate_index(pathlib.Path('tests/src/blog')))
    expectations = [
        {'template': 'skua_blogpost',
         'publish_date': '22/08/2019',
         'publish_time': '14:55:11 UTC',
         'title': 'Hello World!',
         'subtitle': 'I exist!',
         'file_path': pathlib.PosixPath(
             'tests/src/blog/skua-is-still-a-static-site-generator.md'),
         'content': '# Skua remains a static site generator\n'
                    '(No surprises here!)\n\n**This site is for testing '
                    'purposes (hence the gibberish content)!**'},
        {'template': 'skua_blogpost',
         'publish_date': '22/08/2019',
         'publish_time': '15:33:00 UTC',
         'title': 'Hello World!',
         'subtitle': 'I exist!',
         'file_path': pathlib.PosixPath('tests/src/blog/what-is-markdown.md'),
         'content': '# Markdown is a markup language\n'
                    '(No surprises here)\n\n**This site is for testing '
                    'purposes (hence the gibberish content)!**'},
        {'template': 'skua_blogpost',
         'publish_date': '19/08/2019',
         'publish_time': '12:01:00 UTC',
         'title': 'Hello World!',
         'subtitle': 'I exist!',
         'file_path': pathlib.PosixPath(
             'tests/src/blog/skua-is-a-static-site-generator.md'),
         'content': '# Skua is a static site generator\n'
                    '(No surprises here)\n\n**This site is for testing '
                    'purposes (hence the gibberish content)!**'},
        {'template': 'skua_blogpost',
         'publish_date': '19/08/2019',
         'publish_time': '12:01:00 UTC',
         'title': 'Hello World!',
         'subtitle': 'I exist!',
         'file_path': pathlib.PosixPath(
             'tests/src/blog/look-an-internal-link.md'),
         'content': '[Link](tests/src/blog/skua-is-a-static-site-generator.md)'},
    ]
    # assertEqual gives a useful diff on failure, unlike assertTrue(a == b).
    self.assertEqual(len(expectations), len(outputs))
    for output in outputs:
        self.assertIn(output, expectations)
def test_11_cwd_pathlib(self):
    """Assigning a pathlib object to jobspec.cwd stores it as a plain path string."""
    spec_file = pathlib.PosixPath(self.jobspec_dir) / "valid" / "basic_v1.yaml"
    jobspec = Jobspec.from_yaml_file(spec_file)
    new_cwd = pathlib.PosixPath("/tmp")
    jobspec.cwd = new_cwd
    self.assertEqual(jobspec.cwd, os.fspath(new_cwd))
def test_realistic(self) -> None:
    """Realistic esprima fixture paths match only when the parent segments agree."""
    fixtures = "/home/strager/tmp/Projects/esprima/test/fixtures"
    comment_file = pathlib.PosixPath(fixtures + "/comment/migrated_0036.js")
    object_file = pathlib.PosixPath(
        fixtures + "/expression/primary/object/migrated_0036.js")
    # Same file name under the expected parent directory: match.
    self.assertTrue(
        match_path(pattern="comment/migrated_0036.js", path=comment_file))
    # Same file name but a different parent directory: no match.
    self.assertFalse(
        match_path(pattern="comment/migrated_0036.js", path=object_file))
    # Full multi-segment parent chain: match.
    self.assertTrue(
        match_path(
            pattern="expression/primary/object/migrated_0036.js",
            path=object_file))
def processing_keys(args):
    """Handle get, check and save keys process.

    Ensures ``args.consumer_key`` / ``args.consumer_secret`` are populated,
    either from the cached keys file under ``args.cache_path`` or by prompting
    the user, and optionally persists them when ``args.save_api_keys`` is set.

    Raises:
        AssertionError: if no username argument was supplied.
    """
    if args.username is None:  # check username argument (idiomatic 'is None')
        raise AssertionError("username argument must be specified")
    # Check for stored keys only when none were given on the command line.
    if args.consumer_key is None or args.consumer_secret is None:
        path = pathlib.PosixPath(args.cache_path + FLICKR_KEYS)
        if not path.is_file():  # no cached keys exist: prompt interactively
            args.consumer_key = getpass.getpass("Enter your Flickr API key: ")
            args.consumer_secret = getpass.getpass(
                "Enter your Flickr API secret: ")
        else:  # cached keys exist: reuse them
            keys = json.loads(path.read_text())
            args.consumer_key = keys[CONSUMER_KEY]
            args.consumer_secret = keys[CONSUMER_SECRET]
    if args.save_api_keys:  # truthiness instead of '== True'
        # Generate keys data.
        keys = {
            CONSUMER_KEY: args.consumer_key,
            CONSUMER_SECRET: args.consumer_secret
        }
        # Create a flickr keys file and store the keys; the context manager
        # guarantees the handle is closed even if the write fails (the
        # original leaked the handle on error).
        pathlib.PosixPath(args.cache_path).mkdir(parents=True, exist_ok=True)
        with pathlib.PosixPath(args.cache_path + FLICKR_KEYS).open("w+") as keys_file:
            keys_file.write(json.dumps(keys))
def test_match_file_name_and_parent(self) -> None:
    """A two-segment pattern matches only a full parent segment plus file name."""
    target = pathlib.PosixPath("/path/to/file.js")
    # Exact parent and file name: match.
    self.assertTrue(match_path(pattern="to/file.js", path=target))
    # Wrong parent directory: no match.
    self.assertFalse(match_path(pattern="other/file.js", path=target))
    # Partial parent segment: no match.
    self.assertFalse(match_path(pattern="o/file.js", path=target))
    # Partial file name: no match.
    self.assertFalse(match_path(pattern="to/file", path=target))
def __init__(
    self,
    path_rel,
    path_dart,
    build_dir,
    mpi
):
    """Set up one DART work directory: read its input.nml, mirror it into
    the build tree, compile, and collect the resulting executables.

    Args:
        path_rel: Work directory path relative to the DART source tree.
        path_dart: Root of the DART source checkout.
        build_dir: Directory to build into; may equal path_dart.
        mpi: Whether to compile with MPI support (passed to self.compile).
    """
    self.path_rel = path_rel
    self.input_nml_file = path_dart / self.path_rel / 'input.nml'
    self.input_nml = f90nml.read(self.input_nml_file)
    # When building out of tree, copy the namelist into the build tree and
    # point input_nml_file at the copy.
    if build_dir != path_dart:
        self.input_nml_file = build_dir / self.path_rel / 'input.nml'
        (build_dir / self.path_rel).mkdir(exist_ok=True, parents=True)
        self.input_nml.write(self.input_nml_file)
    self.compile(path_dart, path_rel, mpi)
    # list the mkmfs and get the binaries.
    # Each mkmf_* script corresponds to one binary whose path is the script
    # path with the 'mkmf_' prefix stripped.
    mkmfs = list((pathlib.PosixPath(path_dart) / path_rel).glob('mkmf_*'))
    dart_exes = [pathlib.PosixPath(str(mm).replace('/mkmf_','/')) for mm in mkmfs]
    build_exes = [(build_dir / path_rel / dd.name) for dd in dart_exes]
    # Copy the freshly compiled binaries into the build tree.
    _ = [shutil.copy(str(dd), str(ss)) for dd, ss in zip(dart_exes, build_exes)]
    # Map binary name -> path in the build tree for later lookup.
    self.exes = {bb.name: bb for bb in build_exes}
def parse_config(self):
    """Parse configuration file.

    Resolution order: an explicit --config-file argument (fatal if it does
    not exist), then ./ldif-git-backup.conf, then /etc/ldif-git-backup.conf.
    Returns the items of the requested section, of the default
    'ldif-git-backup' section, or None when no section applies.
    """
    interpolation = configparser.ExtendedInterpolation()
    parser = configparser.ConfigParser(interpolation=interpolation)
    if self.arg['config_file']:
        config_path = pathlib.PosixPath(self.arg['config_file'])
        if not config_path.is_file():
            sys.exit('Error: invalid config file')
        parser.read(config_path.as_posix())
    else:
        # Fall back to the local config file, then the system-wide one.
        for candidate in ('ldif-git-backup.conf', '/etc/ldif-git-backup.conf'):
            config_path = pathlib.PosixPath(candidate)
            if config_path.is_file():
                parser.read(config_path.as_posix())
                break
    if self.arg['config']:
        if not parser.has_section(self.arg['config']):
            sys.exit('Error: no config section named %s' % self.arg['config'])
        return parser[self.arg['config']].items()
    if parser.has_section('ldif-git-backup'):
        return parser['ldif-git-backup'].items()
    return None
def create_disk(self, name, size=None, backing_on=None):
    """Create a qcow2 volume called *name* in the storage pool.

    Args:
        name: Volume name (without the .qcow2 suffix).
        size: Capacity in GB; defaults to 20 when falsy.
        backing_on: Optional name of an image under upstream/ to use as a
            copy-on-write backing store.

    Returns:
        The libvirt storage volume object created from the generated XML.

    Raises:
        SystemExit: if a volume with this name already exists (a removal
            hint is logged first).
        libvirt.libvirtError: re-raised for any other libvirt failure.
    """
    if not size:
        size = 20
    disk_path = pathlib.PosixPath("{path}/{name}.qcow2".format(
        path=self.get_storage_dir(), name=name))
    logger.debug("create_disk: %s (%dGB)", str(disk_path), size)
    # Fill in the volume XML template with name, capacity and target path.
    root = ET.fromstring(STORAGE_VOLUME_XML)
    root.find("./name").text = disk_path.name
    root.find("./capacity").text = str(size)
    root.find("./target/path").text = str(disk_path)
    if backing_on:
        # Point a backingStore element at the upstream base image so the new
        # volume is a thin copy-on-write overlay.
        backing_file = pathlib.PosixPath(
            "{path}/upstream/{backing_on}.qcow2".format(
                path=self.get_storage_dir(), backing_on=backing_on))
        backing = ET.SubElement(root, "backingStore")
        ET.SubElement(backing, "path").text = str(backing_file)
        ET.SubElement(backing, "format").attrib = {"type": "qcow2"}
    xml = ET.tostring(root).decode()
    try:
        return self.storage_pool_obj.createXML(xml)
    except libvirt.libvirtError as e:
        if e.get_error_code() == libvirt.VIR_ERR_STORAGE_VOL_EXIST:
            # A stale volume blocks creation: tell the user how to delete it
            # manually, then bail out.
            logger.error(
                ("A volume image already exists and prevent the creation "
                 " of a new one. You can remove it with the following "
                 "command:\n"
                 "    sudo virsh vol-delete --pool "
                 "{pool_name} {vol_name}.qcow2").format(
                    pool_name=self.storage_pool_obj.name(),
                    vol_name=name))
            sys.exit(1)
        raise
def init_storage_pool(self, storage_pool):
    """Look up the libvirt storage pool by name, creating it if missing,
    and make sure it is active.

    Args:
        storage_pool: Name of the libvirt storage pool to use.
    """
    try:
        self.storage_pool_obj = self.conn.storagePoolLookupByName(
            storage_pool)
    except libvirt.libvirtError as e:
        # Anything other than "pool does not exist" is fatal.
        if e.get_error_code() != libvirt.VIR_ERR_NO_STORAGE_POOL:
            raise (e)
        storage_dir = pathlib.PosixPath(DEFAULT_STORAGE_DIR)
        # NOTE(review): assumes self.storage_pool_obj was pre-initialized
        # (e.g. to None) elsewhere; the lookup above raised, so it was not
        # assigned in this method - confirm.
        if not self.storage_pool_obj:
            self.storage_pool_obj = self.create_storage_pool(
                name=storage_pool, directory=storage_dir)
        try:
            full_dir = storage_dir / "upstream"
            dir_exists = full_dir.is_dir()
        except PermissionError:
            # Treat an unreadable directory the same as a missing one.
            dir_exists = False
        if not dir_exists:
            # Tell the user to create the directory with the ownership qemu
            # expects, then stop.
            qemu_dir = pathlib.PosixPath(QEMU_DIR)
            logger.error(
                USER_CREATE_STORAGE_POOL_DIR.format(
                    qemu_user=qemu_dir.owner(),
                    qemu_group=qemu_dir.group(),
                    storage_dir=self.get_storage_dir(),
                ))
            exit(1)
    if not self.storage_pool_obj.isActive():
        self.storage_pool_obj.create(0)
def get_test_data():
    """Return a fixed (categories, listings) fixture pair for tests."""
    child_category = Category(id=1270, name="fdas", is_top_level=False, parent_id=1120)
    root_category = Category(id=1000, name="Farzeuge", is_top_level=True)
    categories = [child_category, root_category]
    deadline = datetime(2020, 4, 23, 13, 0)
    first_listing = Listing(
        id=2032160001,
        title="dfasfdsa",
        daten={},
        kurzbeschreibung="fdas",
        gebotsbasis="fdaf",
        lagerort="fdassd",
        attachments=[],
        pictures_paths=[
            pathlib.PosixPath("/tmp"),
            pathlib.PosixPath("/tmp/test.json"),
        ],
        category=child_category,
        gebotstermin=deadline,
    )
    second_listing = Listing(
        id=2032170001,
        title="dfasfdsa",
        daten={},
        kurzbeschreibung="fdas",
        gebotsbasis="fdaf",
        lagerort="fdassd",
        attachments=[],
        pictures_paths=[pathlib.Path("/tmp")],
        category=root_category,
        gebotstermin=deadline,
    )
    return (categories, [first_listing, second_listing])
def _Run(vm, server_ip: str, server_port: str, threads: int, pipeline: int,
         clients: int, password: Optional[str] = None) -> 'MemtierResult':
  """Runs the memtier benchmark on the vm.

  Removes stale result files, builds the memtier command line from the
  module flags, executes it remotely, pulls the result files back locally,
  and parses them into a MemtierResult.
  """
  results_file = pathlib.PosixPath(f'{MEMTIER_RESULTS}_{server_port}')
  vm.RemoteCommand(f'rm -f {results_file}')
  json_results_file = (
      pathlib.PosixPath(f'{JSON_OUT_FILE}_{server_port}')
      if MEMTIER_TIME_SERIES.value else None)
  # BUG FIX: only remove the JSON results file when time-series output is
  # enabled; previously this executed `rm -f None` when json_results_file
  # was None.
  if json_results_file:
    vm.RemoteCommand(f'rm -f {json_results_file}')
  # Specify one of run requests or run duration.
  requests = (
      MEMTIER_REQUESTS.value if MEMTIER_RUN_DURATION.value is None else None)
  test_time = (
      MEMTIER_RUN_DURATION.value
      if MEMTIER_RUN_MODE.value == MemtierMode.NORMAL_RUN
      else WARM_UP_SECONDS + MEMTIER_CPU_DURATION.value)
  cmd = BuildMemtierCommand(
      server=server_ip,
      port=server_port,
      protocol=MEMTIER_PROTOCOL.value,
      run_count=MEMTIER_RUN_COUNT.value,
      clients=clients,
      threads=threads,
      ratio=MEMTIER_RATIO.value,
      data_size=MEMTIER_DATA_SIZE.value,
      key_pattern=MEMTIER_KEY_PATTERN.value,
      pipeline=pipeline,
      key_minimum=1,
      key_maximum=MEMTIER_KEY_MAXIMUM.value,
      random_data=True,
      test_time=test_time,
      requests=requests,
      password=password,
      outfile=results_file,
      cluster_mode=MEMTIER_CLUSTER_MODE.value,
      json_out_file=json_results_file)
  vm.RemoteCommand(cmd)
  # Pull the summary results back to a clean local path.
  output_path = os.path.join(
      vm_util.GetTempDir(), f'memtier_results_{server_port}')
  vm_util.IssueCommand(['rm', '-f', output_path])
  vm.PullFile(vm_util.GetTempDir(), results_file)
  time_series_json = None
  if json_results_file:
    # Pull the per-second time-series JSON as well.
    json_path = os.path.join(
        vm_util.GetTempDir(), f'json_data_{server_port}')
    vm_util.IssueCommand(['rm', '-f', json_path])
    vm.PullFile(vm_util.GetTempDir(), json_results_file)
    with open(json_path, 'r') as ts_json:
      time_series_json = ts_json.read()
  with open(output_path, 'r') as output:
    summary_data = output.read()
  return MemtierResult.Parse(summary_data, time_series_json)
def test_path():
    """A path equal to the input root maps to the output root itself."""
    source = pathlib.PosixPath("input_test")
    source_root = pathlib.PosixPath("input_test")
    dest_root = pathlib.PosixPath("output_test")
    resolved = resolve_output_dir_path(source, source_root, dest_root)
    assert str(resolved) == "output_test"
def test_path_2():
    """A path one level below the input root maps one level below the output root."""
    source = pathlib.PosixPath("input_test/folder1")
    source_root = pathlib.PosixPath("input_test")
    dest_root = pathlib.PosixPath("output_test")
    resolved = resolve_output_dir_path(source, source_root, dest_root)
    assert str(resolved) == "output_test/folder1"
def __init__(
    self,
    source_dir: str,
    mkmf_template: str,
    # NOTE(review): mutable default argument; safe only if never mutated
    # in place by this class.
    work_dirs: list=['models/wrf_hydro/work'],
    mpi: bool=True,
    build_dir: str = None,
    overwrite: bool = False
):
    """Compile DART from source_dir into build_dir for each work directory.

    Args:
        source_dir: Root of the DART source checkout.
        mkmf_template: File name under build_templates/ to symlink as
            mkmf.template.
        work_dirs: One or more work directories (relative to source_dir)
            to compile; a single value is wrapped into a list.
        mpi: Whether to compile with MPI support.
        build_dir: Directory to create and build into.
        overwrite: Stored on the object; not used in this method.
    """
    self.source_dir = pathlib.PosixPath(source_dir).absolute()
    self.mkmf_template = self.source_dir / ('build_templates/' + mkmf_template)
    if type(work_dirs) is not list:
        work_dirs = [work_dirs]
    self.work_dirs = [pathlib.PosixPath(ww) for ww in work_dirs]
    self.mpi = mpi
    self.build_dir = build_dir
    self.overwrite = overwrite
    self.git_hash = get_git_revision_hash(self.source_dir)

    # mkmf establishment: (re)point build_templates/mkmf.template at the
    # requested template via a symlink.
    mkmf_dir = self.source_dir / 'build_templates'
    mkmf_target = mkmf_dir / 'mkmf.template'
    if mkmf_target.exists():
        mkmf_target.unlink()
    mkmf_target.symlink_to(self.mkmf_template)

    self.object_id = None
    """str: A unique id to join object to compile directory."""

    ## Setup directory paths
    # NOTE(review): mkdir() with no exist_ok raises if build_dir already
    # exists - presumably intentional to force a fresh build dir; confirm.
    if self.build_dir is not None:
        self.build_dir = pathlib.PosixPath(self.build_dir)
        self.build_dir.mkdir()
    # TODO(JLM): enforce that basename(build_dir) is experiment_dir

    # Compile each work dir and attach the DartWork object as an attribute
    # whose name is the work dir path with '/' replaced by '__'.
    for www in self.work_dirs:
        ww = str(www)
        dart_work = DartWork(
            ww,
            self.source_dir,
            self.build_dir,
            self.mpi
        )
        ww_repl = ww.replace('/','__')
        self.__dict__.update({ww_repl:dart_work})

    # Add in unique ID file to match this object to prevent assosciating
    # this directory with another object
    # NOTE(review): if build_dir is None this joinpath call raises
    # AttributeError - build_dir appears effectively required; confirm.
    self.object_id = str(uuid.uuid4())
    with open(self.build_dir.joinpath('.uid'),'w') as f:
        f.write(self.object_id)
    self.pickle()
    print('DART successfully compiled into ' + str(self.build_dir))
def make_directory(self):
    """Create dated 'construction' and 'consulting' subdirectories under Rootpath.

    Uses the update date returned by self.get_update_date() as the leaf
    directory name; existing directories are left untouched (exist_ok).
    """
    date_object = self.get_update_date()
    root = pathlib.PosixPath(self.Rootpath)
    # Join with pathlib's '/' operator instead of string concatenation so
    # duplicate or missing separators cannot creep in.
    path_construction = root / 'construction' / date_object
    path_consulting = root / 'consulting' / date_object
    # Call mkdir on the instances rather than the unbound
    # pathlib.Path.mkdir(path, ...) form.
    path_construction.mkdir(exist_ok=True, parents=True)
    path_consulting.mkdir(exist_ok=True, parents=True)
def test_path_7():
    """Relative paths resolve the same way as plain ones."""
    base = "../web_image_converter"
    source = pathlib.PosixPath(base + "/input_test/folder1")
    source_root = pathlib.PosixPath(base + "/input_test")
    dest_root = pathlib.PosixPath(base + "/output_test")
    resolved = resolve_output_dir_path(source, source_root, dest_root)
    assert str(resolved) == base + "/output_test/folder1"
def run(setting: Setting) -> None:
    """Convert everything under setting.input_directory into setting.output_directory."""
    # NOTE: structured this way so start/end log lines bracket the
    # conversion; slightly verbose but convenient.
    source_root = pathlib.PosixPath(setting.input_directory)
    target_root = pathlib.PosixPath(setting.output_directory)
    logger.debug(f"start convert: {source_root} -> {target_root}")
    file_conveter.logger = logger
    file_conveter.convert_all(input_root_dir=source_root,
                              output_root_dir=target_root)
    logger.debug(f"end convert: {source_root} -> {target_root}")
def test_path_4():
    """A path that is not under the input root raises ValueError."""
    source = pathlib.PosixPath("input_test")
    source_root = pathlib.PosixPath("input_test/folder1")
    dest_root = pathlib.PosixPath("output_test")
    with pytest.raises(ValueError):
        resolve_output_dir_path(source, source_root, dest_root)
def test_path_5():
    """An absolute input path combines with a relative output root."""
    cwd = pathlib.PosixPath(os.getcwd())
    resolved = resolve_output_dir_path(
        cwd / "input_test/folder1",
        cwd / "input_test",
        pathlib.PosixPath("output_test"),
    )
    assert str(resolved) == "output_test/folder1"
def check_file_exist_colon(run_dir, file_str):
    """Takes a file WITH A COLON (not without).

    Checks run_dir for the file both with its colon intact and with colons
    replaced by underscores (some filesystems/tools rewrite them).

    Args:
        run_dir: pathlib.Path of the directory to look in.
        file_str: Relative file name containing a colon; non-str values are
            converted with str() first.

    Returns:
        The './'-prefixed relative path of whichever variant exists, or
        None when neither does.
    """
    # isinstance is the idiomatic type check (handles str subclasses too).
    if not isinstance(file_str, str):
        file_str = str(file_str)
    file_colon = pathlib.PosixPath(file_str)
    file_no_colon = pathlib.PosixPath(file_str.replace(':', '_'))
    if (run_dir / file_colon).exists():
        return './' + str(file_colon)
    if (run_dir / file_no_colon).exists():
        return './' + str(file_no_colon)
    return None
def test_path_6():
    """A relative input path combines with an absolute output root."""
    cwd = pathlib.PosixPath(os.getcwd())
    dest_root = cwd / "output_test"
    resolved = resolve_output_dir_path(
        pathlib.PosixPath("input_test/folder1"),
        pathlib.PosixPath("input_test"),
        dest_root,
    )
    assert str(resolved) == str(cwd / "output_test/folder1")
def test_recursive(self):
    """Build the recursive index of tests/src/blog and the expected entries.

    NOTE(review): this test currently makes no assertions - `index` and
    `expectations` are constructed but never compared (unlike
    test_not_recursive, and these expectations carry no 'content' key).
    Presumably the comparison is missing - confirm before relying on this
    test for coverage.
    """
    index = generate_index(pathlib.Path('tests/src/blog'), recursive=True)
    expectations = [
        {'template': 'skua_blogpost', 'publish_date': '22/08/2019',
         'publish_time': '14:55:11 UTC', 'title': 'Hello World!',
         'subtitle': 'I exist!',
         'file_path': pathlib.PosixPath(
             'tests/src/blog/skua-is-still-a-static-site-generator.md')},
        {'template': 'skua_blogpost', 'publish_date': '22/08/2019',
         'publish_time': '15:33:00 UTC', 'title': 'Hello World!',
         'subtitle': 'I exist!',
         'file_path': pathlib.PosixPath('tests/src/blog/what-is-markdown.md')},
        {'template': 'skua_blogpost', 'publish_date': '19/08/2019',
         'publish_time': '12:01:00 UTC', 'title': 'Hello World!',
         'subtitle': 'I exist!',
         'file_path': pathlib.PosixPath(
             'tests/src/blog/skua-is-a-static-site-generator.md')},
        {'template': 'skua_blogpost', 'publish_date': '19/08/2019',
         'publish_time': '12:01:00 UTC', 'title': 'Hello World!',
         'subtitle': 'I exist!',
         'file_path': pathlib.PosixPath(
             'tests/src/blog/look-an-internal-link.md')}]
def test_bind_folder():
    """test bindit mapping of folder - bind referenced dir, without any superfluous nesting."""
    with tempfile.TemporaryDirectory(prefix=TEMPFILE_PREFIX) as sourcedir:
        resolved_source = pathlib.Path(sourcedir).resolve()
        # The expected destination mirrors the source path under /bindit.
        expected_dest = pathlib.PosixPath("/bindit") / resolved_source.relative_to(
            resolved_source.anchor)
        with DockerRun("ls", sourcedir) as container:
            assert container.is_running()
            # check that sourcedir has been mounted
            mounts = container.get_mounts()
            assert resolved_source in mounts
            assert pathlib.PosixPath(mounts[resolved_source]) == expected_dest
def kvm_binary(self):
    """Return the first existing candidate path from KVM_BINARIES.

    Raises:
        Exception: when none of the candidate binaries exist.
    """
    paths = [pathlib.PosixPath(candidate) for candidate in KVM_BINARIES]
    for binary in paths:
        if binary.exists():
            return binary
    raise Exception("Failed to find the kvm binary in: ", paths)
def test_serialize_pathlib(self):
    # Pure path objects work in all platforms.
    self.assertSerializedEqual(pathlib.PurePosixPath())
    self.assertSerializedEqual(pathlib.PureWindowsPath())
    for pure_path, expected_repr in (
        (pathlib.PurePosixPath("/path/file.txt"),
         "pathlib.PurePosixPath('/path/file.txt')"),
        (pathlib.PureWindowsPath("A:\\File.txt"),
         "pathlib.PureWindowsPath('A:/File.txt')"),
    ):
        self.assertSerializedResultEqual(
            pure_path, (expected_repr, {"import pathlib"}))
    # Concrete path objects work on supported platforms and serialize to
    # their pure counterparts.
    if sys.platform == "win32":
        self.assertSerializedEqual(pathlib.WindowsPath.cwd())
        concrete = pathlib.WindowsPath("A:\\File.txt")
        expected = ("pathlib.PureWindowsPath('A:/File.txt')", {"import pathlib"})
    else:
        self.assertSerializedEqual(pathlib.PosixPath.cwd())
        concrete = pathlib.PosixPath("/path/file.txt")
        expected = ("pathlib.PurePosixPath('/path/file.txt')", {"import pathlib"})
    self.assertSerializedResultEqual(concrete, expected)
    # Path values embedded in model fields serialize the same way.
    field = models.FilePathField(path=pathlib.PurePosixPath("/home/user"))
    string, imports = MigrationWriter.serialize(field)
    self.assertEqual(
        string,
        "models.FilePathField(path=pathlib.PurePosixPath('/home/user'))",
    )
    self.assertIn("import pathlib", imports)
def __init__(self, configfile: str = USER_CONFIG):
    """Load HTTPS auth credentials for SUBDOMAIN from *configfile* (falling
    back to interactive prompts) and open a requests session.
    """
    # Get HTTPS auth User and Password to use for the hard coded subdomain
    config = configparser.ConfigParser()
    config.optionxform = lambda option: option  # preserve case for letters
    configfullpath = pathlib.PosixPath(configfile).expanduser(
    )  # turn ~ into a full path, since Python ConfigParser doesn't cope with ~/<file>
    config.read(configfullpath)
    if SUBDOMAIN in config.sections() and all(
            key in ['User', 'Password']
            for (key, value) in config.items(SUBDOMAIN)):
        self.username = config[SUBDOMAIN].get('User')
        self.password = config[SUBDOMAIN].get('Password')
    else:
        # NOTE(review): the message names USER_CONFIG even when a different
        # configfile was passed in - confirm that is intended.
        print(
            'Config file %s not found, or lacks section [%s] with both User and Password'
            % (USER_CONFIG, SUBDOMAIN))
        import getpass
        # BUG FIX: the credential prompt was corrupted in the source
        # (`input("User: "******"Password ...` is a syntax error);
        # reconstructed as a visible username prompt plus a hidden
        # password prompt.
        self.username = input("User: ")
        self.password = getpass.getpass(
            "Password for {:}: ".format(self.username))
    # Open a HTTPS session
    self.sess = requests.Session()
    self.open_session()
def resolve_constitution(fp):
    """Load a constitution JSON file and derive (constitution, bootnodes).

    Args:
        fp: Path to a JSON file with 'masternodes' and 'delegates' sections,
            each mapping verifying keys to IP strings.

    Returns:
        Tuple of the constitution dict (key lists per section) and a dict
        mapping each verifying key to a 'tcp://<ip>:19000' bootnode URI.

    Raises:
        AssertionError: on a missing section or an invalid IP string.
    """
    path = pathlib.PosixPath(fp).expanduser()
    # NOTE(review): touch() creates an empty file when fp is missing, which
    # then fails JSON parsing below - presumably intentional; confirm.
    path.touch()
    # Context manager closes the handle even if json.load raises (the
    # original leaked the open handle on a parse error).
    with open(str(path), 'r') as f:
        j = json.load(f)
    assert 'masternodes' in j.keys(), 'No masternodes section.'
    assert 'delegates' in j.keys(), 'No delegates section.'
    const = {
        'masternodes': list(j['masternodes'].keys()),
        'delegates': list(j['delegates'].keys())
    }
    bootnodes = {**j['masternodes'], **j['delegates']}
    formatted_bootnodes = {}
    for vk, ip in bootnodes.items():
        assert is_valid_ip(
            ip), 'Invalid IP string provided to boot node argument.'
        formatted_bootnodes[vk] = f'tcp://{ip}:19000'
    return const, formatted_bootnodes
def test_match_file_glob_with_parent_directory(self) -> None:
    """A glob in the file position still matches under the named parent segment."""
    target = pathlib.PosixPath("/path/to/file.js")
    self.assertTrue(match_path(pattern="to/*", path=target))
def get_version():
    """Return the version string stored in carp_api/VERSION next to this file."""
    base_dir = pathlib.PosixPath(__file__).parent.absolute()
    version_file = base_dir / 'carp_api' / 'VERSION'
    with open(version_file) as handle:
        return handle.readlines()[0].strip()
def __init__(self, path):
    """Resolve *path* (which must exist), recording the original spelling as
    a symlink alias when resolution changed it.

    NOTE(review): the inequality compares the resolved Path against the raw
    argument, so a str argument always registers as an alias - presumably
    callers pass Path objects; confirm.
    """
    resolved = pathlib.PosixPath(path).resolve(strict=True)
    self.path = resolved
    self.symlinks = []
    self.dependencies = []
    if resolved != path:
        self.symlinks.append(str(path))