def test_open_file(self):
    """File-style operations on a MultiplexedPath (a directory) must raise FileNotFoundError."""
    path = MultiplexedPath(self.folder)
    # Each of these is a file operation that makes no sense on a directory.
    for operation in (path.read_bytes, path.read_text, path.open):
        with self.assertRaises(FileNotFoundError):
            operation()
def linkiter(path):
    """Yield parsed URLs for every href/src link in *path*'s HTML that points
    at entm.auone.jp and whose file extension is in the module-level ``exts``.

    Args:
        path: pathlib.Path to an HTML file, read as UTF-8.

    Yields:
        urllib.parse.ParseResult for each matching link, resolved against
        the site base URL.
    """
    html = path.read_text(encoding='utf-8')
    # Lazy `.*?` keeps each match inside a single quoted attribute value.
    for m in re.finditer(r'(href|src)="(.*?)"', html):
        # Resolve relative links against the site root.  Plain string here:
        # the original used an f-string with no placeholders (ruff F541).
        url = urlparse(urljoin('https://entm.auone.jp/', m.group(2)))
        ext = os.path.splitext(url.path)[1]
        if url.hostname == 'entm.auone.jp' and ext in exts:
            yield url
def html_doc_items(self) -> [(str, str)]:
    """Return (label, value) pairs describing this task for the HTML docs."""
    # Resolve the mapper script path only when a script file name is configured.
    path = self.parent.base_path() / self.mapper_script_file_name if self.mapper_script_file_name else ''
    return [
        ('file pattern', _.i[self.file_pattern]),
        ('compression', _.tt[self.compression]),
        ('read mode', _.tt[self.read_mode]),
        ('date regex', _.tt[escape(self.date_regex)] if self.date_regex else None),
        ('file dependencies', [_.i[dependency, _.br] for dependency in self.file_dependencies]),
        ('mapper script file name', _.i[self.mapper_script_file_name]),
        (_.i['mapper script'],
         html.highlight_syntax(path.read_text().strip('\n')
                               if self.mapper_script_file_name and path.exists() else '',
                               'python')),
        ('make unique', _.tt[repr(self.make_unique)]),
        ('skip header', _.tt[self.skip_header]),
        ('target_table', _.tt[self.target_table]),
        ('db alias', _.tt[self.db_alias]),
        # Fixed label typo: "partion" -> "partition".
        ('partition target table by day_id', _.tt[self.partition_target_table_by_day_id]),
        # json.dumps renders the character with visible quoting/escapes;
        # `is not None` replaces the non-idiomatic `!= None` (PEP 8).
        ('sql delimiter char',
         _.tt[json.dumps(self.delimiter_char) if self.delimiter_char is not None else None]),
        ('quote char',
         _.tt[json.dumps(self.quote_char) if self.quote_char is not None else None]),
        ('null value string',
         _.tt[json.dumps(self.null_value_string) if self.null_value_string is not None else None]),
        ('time zone', _.tt[self.timezone])
    ]
def __init__(self, path):
    """Load every task from *path*, one serialized task per line, keyed by id."""
    self.path = path
    self.tasks = {}
    for raw_line in path.read_text().split("\n"):
        # Skip blank lines (including the trailing one split() produces).
        if not raw_line:
            continue
        parsed = Task.parse(raw_line)
        self.tasks[parsed.id] = parsed
def generate_or_check(manifest, args, path, func):
    """Generate/check a file with a single generator

    Return True if successful; False if a comparison failed.
    """
    buffer = io.StringIO()
    func(manifest, args, buffer)
    rendered = buffer.getvalue()
    current = path.read_text()
    # Fast path: file already matches the generated content.
    if rendered == current:
        return True
    if args.generate:
        path.write_text(rendered)
        return True
    # Check-only mode: report the mismatch with a unified diff.
    print(f'File {path} differs from expected!')
    for diff_line in difflib.unified_diff(
            rendered.splitlines(),
            current.splitlines(),
            str(path),
            '<expected>',
            lineterm='',
    ):
        print(diff_line)
    return False
def read_file(path: Path) -> str:
    '''
    Read a resource file and return its contents.

    @param path: path of the resource file
    @return: the resource contents
    '''
    return path.read_text()
def __init__(self, task: RunCommand, expectation_suffix=None):
    """Load the expected-output text for *task* from its test directory.

    The file name is ``expected.<tag>[.<suffix>].txt``.
    """
    super().__init__(task, name=task.description())
    name_parts = ['expected', task.test_parameters.tag]
    if expectation_suffix is not None:
        name_parts.append(expectation_suffix)
    name_parts.append('txt')
    path = task.test_parameters.test_directory / '.'.join(name_parts)
    self._expected = path.read_text()
def get_requirements(path: Path) -> List[str]:
    """Parse a requirements file, dropping comments and blank lines."""
    LOG.info("Reading reqs from %s", path)
    # Strip everything after a '#' (comment), then drop empty results.
    stripped = (raw.split("#", 1)[0].strip()
                for raw in path.read_text().splitlines())
    return [entry for entry in stripped if entry]
def read_file(filename: str) -> str:
    """Get the contents of a file contained with qutebrowser.

    Args:
        filename: The filename to open as string.

    Return:
        The file contents as string.
    """
    # EAFP: probe the cache first, fall back to reading from disk.
    try:
        return _resource_cache[filename]
    except KeyError:
        return _resource_path(filename).read_text(encoding='utf-8')
def _process_file(self, path, rewriter):
    """Process one file."""
    log.warning('Processing %s', path)
    try:
        rw = rewriter(path.read_text(),
                      self.rewrite_action,
                      filename=str(path))
    except UnicodeDecodeError:  # pragma: no cover
        log.error('Error', exc_info=True)
        return
    result = rw()
    if self.only_check_syntax:
        return
    out_path = pathlib.Path(str(path) + '.out')
    out_path.write_text(result, encoding='utf-8')
    self.output_files.append(out_path)
def read(self, path: Path, text: Optional[str] = None) -> Tuple[str, List[Diagnostic]]:
    """Read *path* (or use the given *text*), apply substitutions, and collect
    a diagnostic for every git merge-conflict marker left in the text.

    Args:
        path: source file; read as UTF-8 when *text* is not supplied.
        text: optional pre-loaded contents.

    Returns:
        Tuple of (substituted text, list of diagnostics).
    """
    if text is None:
        text = path.read_text(encoding="utf-8")
    text, diagnostics = self.substitute(text)
    # Bug fix: finditer() returns an iterator, which is ALWAYS truthy, so the
    # previous `if match_found:` guard was dead code.  Iterate directly.
    for match in PAT_GIT_MARKER.finditer(text):
        # 0-based line number of the marker: count newlines before the match.
        lineno = text.count("\n", 0, match.start())
        diagnostics.append(GitMergeConflictArtifactFound(path, lineno))
    return (text, diagnostics)
def read_file(filename: str) -> str:
    """Get the contents of a file contained with qutebrowser.

    Args:
        filename: The filename to open as string.

    Return:
        The file contents as string.
    """
    # EAFP cache probe; on a miss, fall through to reading from disk.
    try:
        return _cache[filename]
    except KeyError:
        pass
    with _keyerror_workaround():
        return _path(filename).read_text(encoding='utf-8')
def html_doc_items(self) -> [(str, str)]:
    """Return (label, value) pairs describing this task for the HTML docs."""
    path = self.sql_file_path()
    return [
        ('file pattern', _.i[self.file_pattern]),
        ('read mode', _.tt[self.read_mode]),
        ('date regex', _.tt[escape(self.date_regex)] if self.date_regex else None),
        ('file dependencies', [_.i[dependency, _.br] for dependency in self.file_dependencies]),
        ('query file name', _.i[self.sql_file_name]),
        (_.i['query'],
         html.highlight_syntax(path.read_text().strip('\n')
                               if self.sql_file_name and path.exists() else '',
                               'sql')),
        ('target_table', _.tt[self.target_table]),
        ('db alias', _.tt[self.db_alias]),
        # Fixed label typo: "partion" -> "partition".
        ('partition target table by day_id', _.tt[self.partition_target_table_by_day_id]),
        ('truncate partitions', _.tt[self.truncate_partitions]),
        ('time zone', _.tt[self.timezone])
    ]
def fetch_answers():
    """Bundle every answers/*.html file into an in-memory zip and send it."""
    archive = BytesIO()
    with zipfile.ZipFile(archive, 'w') as zf:
        for source in Path('answers').glob('*.html'):
            info = zipfile.ZipInfo(source.name)
            # Stamp each entry with the current local time.
            info.date_time = time.localtime(time.time())[:6]
            info.compress_type = zipfile.ZIP_DEFLATED
            zf.writestr(info, source.read_text())
    archive.seek(0)
    stamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    return send_file(archive, attachment_filename=stamp + ".zip",
                     as_attachment=True)
def _process_file(self, path, rewriter):
    """Process one file."""
    log.warning('Processing %s', path)
    try:
        rw = rewriter(path.read_text(),
                      self.rewrite_action,
                      filename=str(path))
    except UnicodeDecodeError:  # pragma: no cover
        log.error('Error', exc_info=True)
        return
    try:
        result = rw()
    except PTParseError:
        self.errors = True
        # In collect mode remember the failure and move on; otherwise abort.
        if not self.collect_errors:
            raise
        return
    out_path = pathlib.Path(str(path) + '.out')
    out_path.write_text(result, encoding='utf-8')
    self.output_files.append(out_path)
def read_yaml(filename, logger=None):
    """Read YAML file.

    Parameters
    ----------
    filename : `~pathlib.Path`
        Filename
    logger : `~logging.Logger`
        Logger

    Returns
    -------
    data : dict
        YAML file content as a dict
    """
    path = make_path(filename)
    if logger is not None:
        logger.info(f"Reading {path}")
    return yaml.safe_load(path.read_text())
def compile(self, path: Path, module_slug: str) -> CompiledModule:
    """
    Detect common errors in the user's code.

    Raise ModuleCompileError, ModuleExitedError or ModuleTimeoutError on error.
    """
    source = path.read_text()
    try:
        # `compile` here is the builtin, not this method (no bare-name
        # access to methods from inside a method body).
        code_object = compile(
            source,
            filename=f"{module_slug}.py",
            mode="exec",
            dont_inherit=True,
            optimize=0,  # keep assertions -- we use them!
        )
    except SyntaxError as err:
        raise ModuleCompileError from err
    compiled = CompiledModule(module_slug, marshal.dumps(code_object))
    self._validate(compiled)
    return compiled
def loadGlobFile(path):
    '''Reads a list of glob patterns from the given file.

    Returns the whitespace-separated patterns, or [] when the file does not
    exist.  Uses EAFP (try/except FileNotFoundError) instead of an exists()
    pre-check, which avoids a check-then-read race.
    '''
    try:
        return path.read_text().split()
    except FileNotFoundError:
        return []
def version():
    """
    Get the local package version.
    """
    scope = {}
    # Execute the package's __version__.py to pick up its __version__ string.
    source = Path("src", _config["name"], "__version__.py").read_text()
    exec(source, scope)
    return scope["__version__"]
def parse_config(cls, path: Path) -> Config:
    """Load *path* as YAML and build a Config from its top-level mapping.

    Args:
        path: YAML config file.

    Raises:
        InvalidConfigError: when the YAML keys do not match Config's fields.
    """
    config_dict = yaml.safe_load(path.read_text())
    try:
        return Config(**config_dict)
    except TypeError as ex:
        # Chain explicitly (`from ex`) so the root-cause TypeError stays
        # visible as __cause__ rather than an incidental __context__.
        raise InvalidConfigError(f"Invalid config: {ex}") from ex
def _process_file(path: Path, func: Callable[[str], str]): orig_content = path.read_text() new_content = func(orig_content) path.write_text(new_content)