def load_legacy(filename):
    """Load a legacy ESRI-ASCII-style grid file into a GridMesh.

    The file begins with header lines of the form ``<word> <number>``
    (e.g. cellsize, xllcorner, yllcorner, optionally UTMzone) followed by
    the numeric raster block, which is read with ``np.loadtxt``.

    :param filename: path to the legacy grid file
    :return: GridMesh anchored at the grid's lower-left corner
    """
    m = Path(filename)
    d = {}
    c = count()
    r = True

    def num(s):
        # Header values may be ints or floats; prefer int when possible.
        try:
            return int(s)
        except ValueError:
            return float(s)

    with m.open() as f:
        while r:
            # Count consumed lines; next(c) works on both Py2 and Py3,
            # unlike the Py2-only c.next().
            next(c)
            r = re.search(r"([^\d\W]+)\s+(-*\d+\.*\d*)", f.readline())
            if r:
                d[r.groups()[0]] = num(r.groups()[1])
    # Number of header lines to skip before the numeric data starts.
    l = next(c) - 1
    data = np.loadtxt(str(m.resolve()), skiprows=l)
    dataset = NpDataset(data, resolution=d["cellsize"])
    if "UTMzone" in d:
        # BUG FIX: previously the literal string "UTMzone" was passed to
        # UTM() instead of the zone number parsed from the header.
        gp = GeoPoint(UTM(d["UTMzone"]), d["xllcorner"], d["yllcorner"])
    else:
        gp = GeoPoint(UTM(1), d["xllcorner"], d["yllcorner"])
    return GridMesh(gp, dataset)
def _expand_path(path): """Expand a server path that may contain symbolic links """ subbed = Path(re.sub(r'^/\~(.*?)/', r'/home/\1/public_html/', path)) resolved = subbed.resolve() if subbed.exists() else subbed return re.sub(r'^/home/(.*?)/public_html/', r'/~\1/', str(resolved) if resolved.exists() else path)
def set_root_path(self, root_path: pathlib2.Path) -> None:
    """root_path setter.

    Stores the fully resolved, absolute form of *root_path* so later
    relative-path computations have a stable base.

    :param root_path: path to scrape, used to obtain relative path
    :type root_path: pathlib2.Path
    """
    normalized = root_path.resolve().absolute()
    self.root_path = normalized
def __init__(self, config, **options):
    """Write *config* out as a numbered patterndb XML file and hand it to
    the underlying "db-parser" driver.
    """
    # NOTE(review): self.index is read before being assigned here, so it is
    # presumably a class-level counter shared across instances — confirm.
    db_file = Path(tc_parameters.WORKING_DIR, "patterndb-{}.xml".format(self.index))
    config.write_to(db_file)
    self.index += 1
    super(DBParser, self).__init__("db-parser", file=db_file.resolve(), **options)
def __init__(self, name, shebang=None, rootdir=None, overwrite=True):
    """
    Creates script with specified name.
    Creates both path (by default is CWD) and script file.
    If shebang is specified, puts it in the first line.
    Makes script executable (depends on platform).
    If overwrite is False, raises an error when file already exists.
    Otherwise (by default) completely rewrites the file.
    """
    rootdir = Path(rootdir or '.')
    rootdir.mkdir(exist_ok=True, parents=True)
    # BUG FIX: Path.resolve() returns a new path rather than mutating in
    # place; the original discarded the result, leaving self.filename
    # potentially relative/unresolved.
    rootdir = rootdir.resolve()
    self.filename = rootdir/name
    if not overwrite and self.filename.exists():
        raise RuntimeError("Script file already exists: {0}".format(self.filename))
    # Binary mode so the shebang's encoded bytes are written verbatim.
    with self.filename.open('wb') as f:
        if shebang:
            f.write(shebang.encode('utf-8', 'replace') + b'\n')
    self._make_executable(self.filename)
def cmk_dir():
    """Return the Checkmk root directory, five levels above this file.

    Prints the error and returns an empty Path if resolution fails.
    """
    try:
        here = Path(__file__).parent
        return here.joinpath("../../../../..").resolve()
    except IOError as e:
        print("Exception {}".format(e))
        return Path("")
def _launch_world(self):
    """Start a ROS launch parent (running its own core) for the world
    described by ``self._launch_file`` under the package's assets dir.
    """
    run_id = roslaunch.rlutil.get_or_generate_uuid(None, False)
    roslaunch.configure_logging(run_id)
    assets_dir = Path(__file__).parent.parent / "assets"
    world_launch = assets_dir / "launch" / self._launch_file
    launch_args = [
        "base_dir:=" + str(assets_dir.resolve()),
        "fast_physics:=true",
    ]
    self.launch = roslaunch.parent.ROSLaunchParent(
        run_id, [(str(world_launch.resolve()), launch_args)], is_core=True)
    self.launch.start()
class ExampleDestination(DestinationDriver):
    """Destination driver that writes to a file under the test-case
    working directory and can block until expected content appears."""

    def __init__(self, filename, **options):
        # Resolve the destination file inside the working dir and pass it
        # through to the base driver's option dict.
        self.driver_name = "example-destination"
        self.path = Path(tc_parameters.WORKING_DIR, filename)
        super(ExampleDestination, self).__init__(
            None, dict({"filename": self.path.resolve()}, **options))

    def wait_file_content(self, content):
        # Read messages one at a time until *content* is seen.
        # NOTE(review): the loop has no timeout — if the content never
        # arrives, pop_messages() is what must eventually fail; confirm.
        with self.path.open() as f:
            message_reader = MessageReader(f.readline, SingleLineParser())
            while True:
                msg = message_reader.pop_messages(1)[0]
                if content in msg:
                    return True
        return False
class ExampleDestination(DestinationDriver):
    """Destination driver writing to a file in the working directory,
    with log-reading helpers backed by a FileIO instance."""

    def __init__(self, filename, **options):
        self.driver_name = "example-destination"
        self.path = Path(tc_parameters.WORKING_DIR, filename)
        self.io = FileIO(self.path)
        merged_options = dict({"filename": self.path.resolve()}, **options)
        super(ExampleDestination, self).__init__(None, merged_options)

    def get_path(self):
        """Return the destination file path."""
        return self.path

    def read_log(self):
        """Return a single log line."""
        return self.read_logs(1)[0]

    def read_logs(self, counter):
        """Return *counter* log lines."""
        return self.io.read_number_of_lines(counter)

    def read_until_logs(self, logs):
        """Read until the given lines have all been seen."""
        return self.io.read_until_lines(logs)
def __init__(self, path: pathlib2.Path, scrape_now: bool = False, filters: Iterable[BaseFileFilter] = None):
    """Base Scraper Initializer

    :param path: path to scrape
    :type path: pathlib2.Path
    :param scrape_now: whether start scraping when the object is created,
        defaults to False
    :type scrape_now: bool, optional
    :param filters: list of filters, defaults to None
    :type filters: Iterable[BaseFileFilter], optional
    """
    # Accept plain strings for convenience.
    if isinstance(path, str):
        path = pathlib2.Path(path)
    self.root = path.resolve().absolute()
    self.history = set()
    self.filters = filters or []
    # Every filter needs the root to compute relative paths.
    for f in self.filters:
        f.set_root_path(self.root)
    if scrape_now:
        self.tree = self.scrape(self.root, 0)[0]
    else:
        self.tree = None
def _resolve_attachment_path(self, path): """Find attachment file or raise MailmergeError.""" # Error on empty path if not path.strip(): raise MailmergeError("Empty attachment header.") # Create a Path object and handle home directory (tilde ~) notation path = Path(path.strip()) path = path.expanduser() # Relative paths are relative to the template's parent dir if not path.is_absolute(): path = self.template_path.parent / path # Resolve any symlinks path = path.resolve() # Check that the attachment exists if not path.exists(): raise MailmergeError("Attachment not found: {}".format(path)) return path
def tried():
    # Probe whether the per-user Desktop log path can be formed; on OSError
    # create a fresh log.csv, otherwise append to an existing one.
    # Each DataN global may be undefined at runtime; writerow raising
    # NameError triggers email() and, because of the try/else chaining,
    # stops writing any later rows.
    try:
        path = Path("C:\\Users\\" + user + "\\Desktop\\log.csv")
        path.resolve()
    except OSError:
        # Fresh file: write header and as many DataN rows as are defined.
        # NOTE(review): this handle is never closed; 'wb' + csv.writer is a
        # Python 2 idiom — confirm target interpreter.
        Log_Document = open("log.csv", "wb")
        writer = csv.writer(Log_Document, lineterminator='\n')
        writer.writerow((header))
        writer.writerow((Data1))
        try:
            writer.writerow((Data2))
        except NameError:
            email()
        else:
            try:
                writer.writerow((Data3))
            except NameError:
                email()
            else:
                try:
                    writer.writerow((Data4))
                except NameError:
                    email()
                else:
                    try:
                        writer.writerow((Data5))
                    except NameError:
                        email()
                    else:
                        try:
                            writer.writerow((Data6))
                        except NameError:
                            email()
                        else:
                            try:
                                writer.writerow((Data7))
                            except NameError:
                                email()
                            else:
                                try:
                                    writer.writerow((Data8))
                                except NameError:
                                    email()
                                else:
                                    try:
                                        writer.writerow((Data9))
                                    except NameError:
                                        email()
                                    else:
                                        try:
                                            writer.writerow((Data10))
                                        except NameError:
                                            email()
    else:
        # Path formed fine: append the same row sequence to log.csv.
        with open(r'log.csv', 'a') as f:
            writer = csv.writer(f)
            writer.writerow((Data1))
            try:
                writer.writerow((Data2))
            except NameError:
                email()
            else:
                try:
                    writer.writerow((Data3))
                except NameError:
                    email()
                else:
                    try:
                        writer.writerow((Data4))
                    except NameError:
                        email()
                    else:
                        try:
                            writer.writerow((Data5))
                        except NameError:
                            email()
                        else:
                            try:
                                writer.writerow((Data6))
                            except NameError:
                                email()
                            else:
                                try:
                                    writer.writerow((Data7))
                                except NameError:
                                    email()
                                else:
                                    try:
                                        writer.writerow((Data8))
                                    except NameError:
                                        email()
                                    else:
                                        try:
                                            writer.writerow((Data9))
                                        except NameError:
                                            email()
                                        else:
                                            try:
                                                writer.writerow((Data10))
                                            except NameError:
                                                email()
    # Tail of a function defined above this chunk (not visible here):
    # links gmx_mpi to the real gmx executable and returns its path.
    gmx_mpi.symlink_to(gmx_exe)
    return str(gmx_mpi.expanduser())


@pytest.fixture
def modified_config(request):
    """Return (tools, append_suffix, Path) derived from pytest options."""
    link_gmx_mpi = request.config.getoption('link_gmx_mpi')
    tools = str(Path('~/gmx_mpi').expanduser()) if link_gmx_mpi else ''
    append_suffix = 'yes' if request.config.getoption('append_suffix') else 'no'
    return tools, append_suffix, Path


# Module-level setup: load the user's GromacsWrapper config if present,
# otherwise fall back to the packaged template; remember which case we hit
# so pytest_configure knows whether to back the file up first.
path_config = Path('~/.gromacswrapper.cfg').expanduser()
gw_config = ConfigParser()
if path_config.exists():
    gw_config.read(str(path_config.resolve()))
    config_existed = True
else:
    gw_config.read('gromacs/templates/gromacswrapper.cfg')
    config_existed = False
config_backup = path_config.with_suffix('.bak')


def pytest_configure(config):
    """Point the Gromacs config at the (possibly linked) gmx tools."""
    link_gmx_mpi = config.getoption('link_gmx_mpi')
    append_suffix = 'yes' if config.getoption('append_suffix') else 'no'
    # Preserve the user's existing config before overwriting settings.
    if config_existed:
        shutil.copy(str(path_config), str(config_backup))
    tools = gmx_mpi_linked(link_gmx_mpi)
    gw_config.set('Gromacs', 'tools', tools)
    gw_config.set('Gromacs', 'append_suffix', append_suffix)
class Config:
    """Site configuration loaded from a YAML-like file via IncludeLoader."""

    @classmethod
    def load_from_file(cls, f, dir_, extra=()):
        """Build a Config from file object *f*, resolving relative paths
        against *dir_*; *extra* files are merged over the base data.

        Raises CMS7Error when a required key is missing or a module name
        is unknown.
        """
        self = cls()
        data = IncludeLoader.load(f)
        # Later files override earlier keys.
        for p in extra:
            data.update(IncludeLoader.load(p))
        try:
            self.name = data['name']
            self.theme = dir_ / data.get('theme', 'theme')
            self.output = Path(data.get('output', 'out'))
            self.content_root = dir_ / data.get('content-root', '.')
            self.ignore = data.get('ignore', [])
            self.output.mkdir(exist_ok=True)
            logger.info('Outputting to %s', self.output.resolve())
            self.htmlless = data.get('pretty-html', False)
            if 'compiled-theme' in data:
                self.compiled_theme = dir_ / data['compiled-theme']
            else:
                self.compiled_theme = None
            # Each resource entry describes a build command over a
            # source/output pair; required keys raise their own CMS7Error.
            self.resources = []
            for r in data.get('resources', []):
                try:
                    command = r['command']
                    source = Path(r['source'])
                    output = Path(r['output'])
                    suffix = r.get('ext', None)
                    recursive = r.get('recursive', False)
                    pattern = r.get('pattern', '*')
                except KeyError as e:
                    raise CMS7Error('resource missing required key {}'.format(e.args[0])) from e
                self.resources.append(Resource(self, command, dir_, source, output, suffix, recursive, pattern))
            # Instantiate modules by name; remaining dict entries become
            # module constructor kwargs.
            self.module_id = {}
            self._modules = []
            for m in data['modules']:
                name = m.pop('name')
                _id = None
                if 'id' in m:
                    _id = m.pop('id')
                if name not in _MODULES:
                    raise CMS7Error('unknown module: {!r}'.format(name))
                logger.info('Loading module: %s', name)
                module = _MODULES[name](self, self.content_root, **m)
                if _id is not None:
                    self.module_id[_id] = module
                self._modules.append(module)
        except KeyError as e:
            raise CMS7Error('config missing required key {}'.format(e.args[0])) from e
        self._data = data
        return self

    def modules(self):
        """Yield loaded modules in declaration order."""
        yield from self._modules

    def __getitem__(self, k):
        # Raw access to the underlying config data.
        return self._data[k]
class Config:
    """Site configuration loaded from a YAML-like file via IncludeLoader."""

    @classmethod
    def load_from_file(cls, f, dir_, extra=()):
        """Build a Config from file object *f*, resolving relative paths
        against *dir_*; *extra* files are merged over the base data.

        Raises CMS7Error when a required key is missing or a module name
        is unknown.
        """
        self = cls()
        data = IncludeLoader.load(f)
        # Later files override earlier keys.
        for p in extra:
            data.update(IncludeLoader.load(p))
        try:
            self.name = data['name']
            self.theme = dir_ / data.get('theme', 'theme')
            self.output = Path(data.get('output', 'out'))
            self.content_root = dir_ / data.get('content-root', '.')
            self.ignore = data.get('ignore', [])
            self.output.mkdir(exist_ok=True)
            logger.info('Outputting to %s', self.output.resolve())
            self.optimistic = False
            self.htmlless = data.get('pretty-html', False)
            self.absolute_url = data.get('absolute-url')
            if self.absolute_url is None:
                logger.warning(
                    "absolute-url is not set, some modules won't work.")
            if 'compiled-theme' in data:
                self.compiled_theme = dir_ / data['compiled-theme']
            else:
                self.compiled_theme = None
            # Each resource entry describes a build command over a
            # source/output pair; required keys raise their own CMS7Error.
            self.resources = []
            for r in data.get('resources', []):
                try:
                    command = r['command']
                    source = Path(r['source'])
                    output = Path(r['output'])
                    suffix = r.get('ext', None)
                    recursive = r.get('recursive', False)
                    pattern = r.get('pattern', '*')
                except KeyError as e:
                    raise CMS7Error('resource missing required key {}'.format(
                        e.args[0])) from e
                self.resources.append(
                    Resource(self, command, dir_, source, output, suffix,
                             recursive, pattern))
            # Instantiate modules by name; remaining dict entries become
            # module constructor kwargs.
            self.module_id = {}
            self._modules = []
            for m in data['modules']:
                name = m.pop('name')
                _id = None
                if 'id' in m:
                    _id = m.pop('id')
                if name not in _MODULES:
                    raise CMS7Error('unknown module: {!r}'.format(name))
                logger.info('Loading module: %s', name)
                module = _MODULES[name](self, self.content_root, **m)
                if _id is not None:
                    self.module_id[_id] = module
                self._modules.append(module)
        except KeyError as e:
            raise CMS7Error('config missing required key {}'.format(
                e.args[0])) from e
        self._data = data
        return self

    def modules(self):
        """Yield loaded modules in declaration order."""
        yield from self._modules

    def __getitem__(self, k):
        # Raw access to the underlying config data.
        return self._data[k]
def interesting(cli_args, temp_prefix):
    """Interesting if the binary crashes with a possibly-desired signature on the stack.

    Args:
        cli_args (list): List of input arguments.
        temp_prefix (str): Temporary directory prefix, e.g. tmp1/1 or tmp4/1

    Returns:
        bool: True if the intended signature shows up on the stack, False otherwise.
    """
    parser = argparse.ArgumentParser(
        prog="crashesat",
        usage="python -m lithium %(prog)s [options] binary [flags] testcase.ext"
    )
    parser.add_argument(
        "-r", "--regex", action="store_true", default=False,
        help="Allow search for regular expressions instead of strings.")
    # BUG FIX: argparse interpolates "%(default)s"; the optparse-style
    # "%default" is not substituted and its "%d" breaks help formatting.
    parser.add_argument(
        "-s", "--sig", default="", type=str,
        help="Match this crash signature. Defaults to '%(default)s'.")
    parser.add_argument(
        "-t", "--timeout", default=120, type=int,
        help="Optionally set the timeout. Defaults to '%(default)s' seconds.")
    parser.add_argument("cmd_with_flags", nargs=argparse.REMAINDER)
    args = parser.parse_args(cli_args)

    log = logging.getLogger(__name__)

    # Run the target under a timeout, then examine the stack for the crash
    # signature (needed if args.sig is specified).
    runinfo = timed_run.timed_run(args.cmd_with_flags, args.timeout, temp_prefix)
    time_str = " (%.3f seconds)" % runinfo.elapsedtime

    # Single CRASHED check (the original tested runinfo.sta twice).
    if runinfo.sta != timed_run.CRASHED:
        log.info("[Uninteresting] It didn't crash.%s", time_str)
        return False

    os_ops.grab_crash_log(args.cmd_with_flags[0], runinfo.pid, temp_prefix, True)
    crash_log = Path(temp_prefix + "-crash.txt")
    if crash_log.resolve().is_file():  # pylint: disable=no-member
        # When using this script, remember to escape characters, e.g. "\(" instead of "(" !
        if file_contains(str(crash_log), args.sig, args.regex)[0]:
            log.info("Exit status: %s%s", runinfo.msg, time_str)
            return True
        log.info("[Uninteresting] It crashed somewhere else!%s", time_str)
        return False
    log.info(
        "[Uninteresting] It appeared to crash, but no crash log was found?%s",
        time_str)
    return False
    # Tail of a function defined above this chunk (not visible here):
    # returns the expanded path of the linked gmx_mpi executable.
    return str(gmx_mpi.expanduser())


@pytest.fixture
def modified_config(request):
    """Return (tools, append_suffix, Path) derived from pytest options."""
    link_gmx_mpi = request.config.getoption('link_gmx_mpi')
    tools = str(Path('~/gmx_mpi').expanduser()) if link_gmx_mpi else ''
    append_suffix = 'yes' if request.config.getoption(
        'append_suffix') else 'no'
    return tools, append_suffix, Path


# Module-level setup: load the user's GromacsWrapper config if present,
# otherwise fall back to the packaged template; remember which case we hit
# so pytest_configure knows whether to back the file up first.
path_config = Path('~/.gromacswrapper.cfg').expanduser()
gw_config = ConfigParser()
if path_config.exists():
    gw_config.read(str(path_config.resolve()))
    config_existed = True
else:
    gw_config.read('gromacs/templates/gromacswrapper.cfg')
    config_existed = False
config_backup = path_config.with_suffix('.bak')


def pytest_configure(config):
    """Point the Gromacs config at the (possibly linked) gmx tools."""
    link_gmx_mpi = config.getoption('link_gmx_mpi')
    append_suffix = 'yes' if config.getoption('append_suffix') else 'no'
    # Preserve the user's existing config before overwriting settings.
    if config_existed:
        shutil.copy(str(path_config), str(config_backup))
    tools = gmx_mpi_linked(link_gmx_mpi)
    gw_config.set('Gromacs', 'tools', tools)
    gw_config.set('Gromacs', 'append_suffix', append_suffix)
from pathlib2 import Path
from deepzoom.factory_functions import Deepzoom

# NOTE(review): Python 2 script (print statements) that exercises the
# Deepzoom tiler against a fixture image and reports tiling/cache stats.
if __name__=='__main__':
    imageName = 'img_001_1268_1024.jpg'
    # Fixture image lives three directories up, under unittest/python/.
    img = Path(__file__).parent.parent.parent.joinpath('unittest/python/%s'%imageName)
    img.resolve()
    # # dzImg = FlatDeepzoomImage(str(img))
    # dzGen = DeepzoomInterface(dzImg)
    dzGen = Deepzoom(img,create_static_cache=True,tileQuality=100)
    print 'Image Size: (%d,%d)'%(dzGen.imageLayout[0].w,dzGen.imageLayout[0].h)
    print 'N DeepZoom Levels: %s'%(len(dzGen.tileLayout))
    for lvl,layout in enumerate(dzGen.tileLayout):
        print 'DeepZoom Level %d: (%d,%d) tiles'%(lvl,layout.w,layout.h)
    # Touch every tile at every level to populate the static cache.
    print 'Saving out all tiles...'
    for lvl in range(len(dzGen.tileLayout)):
        for col in range(int(dzGen.tileLayout[lvl].w)):
            for row in range(int(dzGen.tileLayout[lvl].h)):
                t = dzGen.get_tile(lvl,col,row)
    print 'Saved!'
    print 'Cache occupies %d kb'%(dzGen.cache_size/1024.0)
    print 'Popping some tiles.'