def create_rst_file(dir_path):
    """
    Recursively generate an ``index.rst`` for ``dir_path`` and each of its
    sub directories, mirroring the image directory tree under the appendix
    docs directory.

    Each generated file lists every image in the directory as an
    ``.. image::`` directive.

    NOTE(review): depends on module-level ``dir_images`` / ``dir_here``
    (pathlib_mate ``Path`` objects) — confirm they are defined at call time.

    :param dir_path: a directory below ``dir_images``; str or Path.
    """
    p = Path(dir_path)
    relpath = p.relative_to(dir_images)
    rst_dirpath = Path(dir_here, "99-附录 (Appendix)", "01-常用图标外链查询", relpath)
    if not rst_dirpath.exists():
        rst_dirpath.mkdir()
    rst_path = Path(rst_dirpath, "index.rst")

    lines = list()
    # document title = directory name, underlined
    lines.append(p.basename)
    lines.append("=" * 80)
    lines.append(".. contents:: 索引")
    lines.append(" :local:")

    # BUG FIX: the original called ``dir_path.select_dir(...)``, which only
    # works when the caller already passes a pathlib_mate Path; use the
    # normalized ``p`` so plain str arguments work too.
    sub_p_list = Path.sort_by_abspath(p.select_dir(recursive=False))
    if len(sub_p_list):
        lines.append("\n**目录**:\n")
        lines.append("\n.. articles::\n")

    # one section per image: file name + underline + image directive
    for p_png in Path.sort_by_abspath(p.select_image(recursive=False)):
        lines.append("\n" + p_png.fname)
        lines.append("-" * 80)
        url = "/" + str(p_png.relative_to(dir_here))
        directive = ".. image:: {}".format(url)
        lines.append(directive)

    content = "\n".join(lines)
    rst_path.write_text(content, "utf-8")

    # recurse into sub directories (no-op when sub_p_list is empty)
    for sub_p in sub_p_list:
        create_rst_file(sub_p)
def dropEvent(self, event):
    """
    Qt drop handler: accept dropped folders and register them as packages.

    A dropped folder qualifies when it contains one of the well-known
    package sub folders (``otls`` / ``scripts`` / ``hdas`` / ``toolbar``),
    or when the folder itself is named ``otls`` (its parent is then the
    package root).

    NOTE(review): relies on module-level ``packages_path``, ``packages``,
    ``Package`` and ``window`` — confirm they exist at import time.
    """
    # BUG FIX: ``hasUrls`` is a method; the original tested the bound
    # method object for truthiness, which is always True. Call it.
    if event.mimeData().hasUrls():
        event.accept()
        e_urls = event.mimeData().urls()
        self.add_new = False
        self.final_path = ""
        for f in e_urls:
            new_path = Path(f.toLocalFile())
            if new_path.is_dir():
                for iterated_file in new_path.iterdir():
                    # membership test over a set replaces the chained ``or``
                    if iterated_file.name in {"otls", "scripts", "hdas", "toolbar"}:
                        self.final_path = new_path
                        self.add_new = True
            # a folder named ``otls`` itself: the package root is its parent
            if new_path.name == "otls":
                self.final_path = new_path.parent
                self.add_new = True
        if self.add_new:
            json_data = {}
            json_data["enable"] = True
            json_data["path"] = self.final_path.abspath
            json_file_path = str(packages_path / self.final_path.basename)
            json_file_path = Path(json_file_path + ".json")
            obj = Package(json_file_path, self.final_path.basename, json_data)
            obj.just_save_all()
            packages[self.final_path.basename] = obj
            window.load_newest(self.final_path.basename)
def reformat(**kwargs):
    """
    auto pep8 format all python file in ``source code`` and ``tests`` dir.
    """
    def _pep8_dir(target_dir, label):
        # run autopep8 on one directory, reporting progress to stdout
        if target_dir.exists():
            print("%s locate at: '%s'." % (label, target_dir))
            print("Auto pep8 all python file ...")
            target_dir.autopep8(**kwargs)
        else:
            print("%s directory not found!" % label)

    # repository directory
    repo_dir = Path(__file__).parent.absolute()
    _pep8_dir(Path(repo_dir, package.__name__), "Source code")
    _pep8_dir(Path(repo_dir, "tests"), "Unittest code")
    print("Complete!")
def test_IO():
    """Round-trip PlayList objects through dump / load / merge."""
    # first playlist: all jpg files
    jpg_paths = [
        p.absolute().abspath for p in Path("files").select_by_ext(".jpg")
    ]
    playlist1 = PlayList()
    playlist1.add_many(jpg_paths)
    playlist1.nowplaying = jpg_paths[0]
    playlist1.nowplaytime = 300
    playlist1.dump("playlist1")

    # second playlist: all png files
    png_paths = [
        p.absolute().abspath for p in Path("files").select_by_ext(".png")
    ]
    playlist2 = PlayList()
    playlist2.add_many(png_paths)
    playlist2.nowplaying = png_paths[0]
    playlist2.nowplaytime = 600
    playlist2.dump("playlist2")

    # load
    playlist1 = PlayList.load("playlist1.dpl")
    assert len(playlist1) == 3
    assert playlist1.nowplaying == jpg_paths[0]

    # merge: grows playlist1 in place, returns the merged-in list
    playlist2 = playlist1.merge("playlist2.dpl")
    assert len(playlist1) == 6
    assert playlist1.nowplaying == jpg_paths[0]
    assert len(playlist2) == 3
def remove_all_account_config_cache(wow_path=None):
    """
    Delete stale account-config cache files (``*.old`` and ``*cache.md5``)
    under ``<wow_path>/WTF/Account``.

    :param wow_path: root directory of the WoW client installation.
        Defaults to the original hard-coded install location, so existing
        callers are unaffected.
    """
    if wow_path is None:
        wow_path = Path(
            r"D:\HSH\Games\WOW Private\Client\World of Warcraft 3.3.5 enUS (Warman wod models)"
        )
    for p in Path(wow_path, "WTF", "Account").select_file(recursive=True):
        # ``endswith`` accepts a tuple — one call covers both patterns
        if p.basename.endswith((".old", "cache.md5")):
            p.remove()
def get_root_dir():
    """
    Return the path of the application's config file.

    On POSIX systems (Linux and macOS) this is ``~/hou_packager.json``;
    on Windows it lives under ``~/documents``.
    """
    # BUG FIX: ``os.name == "Darwin"`` could never be true — "Darwin" is a
    # ``platform.system()`` value, while ``os.name`` is "posix" on macOS.
    # The dead comparison is removed; behavior is unchanged.
    if os.name == "posix":
        save_path = Path("~").expanduser()
    else:
        save_path = Path().home().joinpath("documents")
    return save_path / "hou_packager.json"
def plan(self, workspace_dir):
    """
    Aggregate all information from ``master_tier``, ``tier``, ``config_dir``
    and ``plan_file`` and create one folder per deploy execution under
    ``workspace_dir``; each folder contains the files a single
    ``aws cloudformation deploy`` run needs.

    :param workspace_dir: root directory in which the per-deployment
        workspace folders are created.
    :return: NOTE(review): no return statement is visible in this chunk —
        the method may continue past the visible source.
    """
    # every environment tag referenced anywhere in the plan
    env_tag_list = extract_all_env_tag(self._plan)
    config_data_mapper = OrderedDict()  # type: OrderedDict[str, dict]
    for env_tag in env_tag_list:
        # each env tag must have a matching ``<env>.json`` config file
        p = Path(self._config_dir, "{}.json".format(env_tag))
        if not p.exists():
            raise FileNotFoundError(
                "the config file of environment `{}` not exists at '{}'".format(env_tag, p))
        # NOTE(review): this is not stdlib ``json`` — the ``ignore_comments``
        # / ``verbose`` kwargs and path argument indicate a project json
        # wrapper that reads from a file path; confirm against imports.
        config_data_mapper[env_tag] = json.load(
            p.abspath, ignore_comments=True, verbose=False)
    pipeline = resolve_pipeline(self._plan)
    workspace_dir = Path(workspace_dir)
    workspace_dir.mkdir(parents=True, exist_ok=True)
    # counter is used in deploy workspace dir name space
    deploy_execution_counter = 0
    for can_id_list, env_tag in pipeline:
        deploy_execution_counter += 1
        # workspace dir name pattern: ``001-dev``, ``002-prod``, ...
        deploy_workspace_dir = Path(
            workspace_dir,
            "{}-{}".format(str(deploy_execution_counter).zfill(3), env_tag)
        )
        deploy_workspace_dir.mkdir(parents=True, exist_ok=True)
        config_data = config_data_mapper[env_tag]
        # collect template instance and file path
        # so we can generate final template files at once
        template_file_list = list()  # type: List[TemplateFile]
        master_can = self._master_tier(**config_data)
        master_can.create_template()
        # master_can_label = self.canlabel_mapper[self.master_canlabel_id]
        # master_can = master_can_label.can_class(**config_data)
        # master_can.CONFIG_DIR = deploy_workspace_dir.abspath
        master_template_path = Path(
            deploy_workspace_dir, master_can.rel_path)
        template_file_list.append(
            TemplateFile(
                template=master_can.template,
                filepath=master_template_path,
            )
        )
def _apply_saved_variables(config_data, group_data, apply_data, dry_run, task_type, source_file_dir, wtf_attr):
    """
    Render jinja2 "SavedVariables" template files and write them into each
    character's WTF target directory.

    :param config_data: config dict; must hold the WOW_DIR_PATH entry.
    :param group_data: mapping of group name -> member list; the "_all"
        group holds every "account.realm.char" member string.
    :param apply_data: mapping of task type -> list of task dicts.
    :param dry_run: when True, only print what would be written.
    :param task_type: which task list in ``apply_data`` to process.
    :param source_file_dir: directory containing the template files.
    :param wtf_attr: name of the WtfCharacter attribute that yields the
        target directory for this kind of file.
    """
    wow_dir_path = config_data[constant.Syntax.WOW_DIR_PATH]
    tasks = apply_data[task_type]
    # each target file is written at most once across all tasks
    target_file_duplicate_filter = set()  # type: typing.Set[str]
    # rendered output cached per source file abspath
    final_content_cache = dict()  # type: typing.Dict[str, str]
    # every character of the "_all" group, available to all templates
    all_wtf_char_list = list()  # type: typing.List[wtf.WtfCharacter]
    for member in group_data["_all"]:
        account, realm, char = member.split(".")
        wtf_char = wtf.WtfCharacter(wow_dir_path=wow_dir_path, account=account, realm=realm, char=char)
        all_wtf_char_list.append(wtf_char)
    for task_data in tasks:
        members = evaluate_members(task_data=task_data, group_data=group_data)
        allow_addons = task_data[constant.Syntax.ALLOW_ADDONS]
        # the characters this task applies to
        wtf_char_list = list()  # type: typing.List[wtf.WtfCharacter]
        for member in members:
            account, realm, char = member.split(".")
            wtf_char = wtf.WtfCharacter(wow_dir_path=wow_dir_path, account=account, realm=realm, char=char)
            wtf_char_list.append(wtf_char)
        for wtf_char in wtf_char_list:
            for addon_sv_file in allow_addons:
                source_file = Path(source_file_dir, addon_sv_file)
                validate_exists(source_file)
                target_file = Path(getattr(wtf_char, wtf_attr), addon_sv_file)
                # NOTE(review): the cache is keyed by source path only, but
                # the render context (``characters=wtf_char_list``) changes
                # per task — later tasks reuse the first task's rendering.
                # Confirm this is intended.
                if source_file.abspath not in final_content_cache:
                    tpl = jinja2.Template(
                        source_file.read_text(encoding="utf-8"))
                    final_content = tpl.render(
                        characters=wtf_char_list,
                        all_characters=all_wtf_char_list)
                    final_content_cache[source_file.abspath] = final_content
                else:
                    final_content = final_content_cache[source_file.abspath]
                if target_file.abspath not in target_file_duplicate_filter:
                    print(
                        f"render '{source_file}' -- write to -> '{target_file.abspath}'"
                    )
                    # dry-run still records the target so output is deduped
                    if not dry_run:
                        target_file.atomic_write_text(final_content, overwrite=True)
                    target_file_duplicate_filter.add(target_file.abspath)
def plan(self):
    """
    Scan every immediate sub directory of ``self.repos_dir`` as a repo,
    and every sub directory of each repo as a tag, printing what is found.
    Directories that fail validation are silently skipped.
    """
    for repo_path in Path(self.repos_dir).select_dir(recursive=False):
        try:
            repo = Repo(path=repo_path.abspath)
            print(repo)
            for tag_path in Path(repo.path).select_dir(recursive=False):
                try:
                    print(Tag(path=tag_path.abspath, repo=repo))
                except NotValidTagDirError:
                    pass
        except NotValidRepoDirError:
            pass
def test_check_project_dir(self):
    """check_project_dir accepts a project root, rejects files and the tests dir."""
    def _check(candidate):
        service.check_project_dir(None, None, candidate)

    this_file = Path(__file__)
    # a file (e.g. /project_dir/tests/test_service.py) is not a directory
    with raises(TypeError):
        _check(this_file)
    # /project_dir/tests is a directory, but not the project root
    with raises(ValueError):
        _check(this_file.parent)
    # /project_dir itself passes
    _check(this_file.parent.parent)
def derive_rst(current_dir, image_dir, n_columns):
    """
    Scan ``image_dir`` for images, compute each one's path relative to
    ``current_dir``, and lay them out in a list-table ``n_columns`` wide.

    :return: the rendered list-table rst directive text.
    """
    current_dir = Path(current_dir)
    image_dir = Path(image_dir)
    images = []
    for img_path in image_dir.select_image():
        rel_uri = str(img_path.relative_to(current_dir))
        images.append(Image(uri=rel_uri, height=64, width=64))
    rows = list(grouper(images, n_columns))
    table = ListTable(data=rows, header=False, index=False)
    return table.render()
def test(self):
    """Build the api reference doc and verify generated / ignored paths."""
    conf_path = Path(__file__).change(new_basename="conf.py").abspath
    ignored = [
        "{}.pkg".format(package_name),
        "{}.util.py".format(package_name),
    ]
    doc = ApiReferenceDoc(
        conf_file=conf_path,
        package_name=package_name,
        ignored_package=ignored,
    )
    doc.fly()

    # generated artifacts must exist ...
    assert Path(DIR_HERE, package_name, "api_reference_doc.rst").exists()
    assert Path(DIR_HERE, package_name, "doctree.rst").exists()
    # ... while ignored packages / modules must not be rendered
    assert not Path(DIR_HERE, package_name, "pkg").exists()
    assert not Path(DIR_HERE, package_name, "util").exists()
def __init__(self, name, path=None, parent=None, is_single_file=None):
    """
    Build a Package node and recursively discover its sub packages
    (directories containing ``__init__.py``) and sub modules (``.py``
    files other than ``__init__.py``).
    """
    super(Package, self).__init__(
        name, path=path, parent=parent, is_single_file=is_single_file)
    self.sub_packages = OrderedDict()
    self.sub_modules = OrderedDict()
    if self.is_single_file is False:
        # walk direct children in deterministic (abspath-sorted) order
        for child in Path.sort_by_abspath(self.path.iterdir()):
            if child.is_dir():
                # a directory holding __init__.py is a sub package
                if Path(child, "__init__.py").exists():
                    self.sub_packages[child.basename] = Package(
                        name="{}.{}".format(name, child.basename),
                        path=child,
                        parent=self,
                        is_single_file=False,
                    )
            elif child.ext == ".py" and child.fname != "__init__":
                # any other .py file is a sub module
                self.sub_modules[child.fname] = Module(
                    name="{}.{}".format(name, child.fname),
                    path=child,
                    parent=self,
                    is_single_file=True,
                )
def main():
    """
    Build the ``README.rst`` file:

    1. walk the ``./arsenal`` directory tree for ``index.rst`` files,
    2. normalize and sort each file's content,
    3. pick a header level from each file's directory depth,
    4. concatenate everything into the readme file.
    """
    def filters(p):
        # only pick up the per-directory index files
        return p.basename == "index.rst"

    blocks = [
        ".. contents::\n\n",
        ".. sectnum::\n",
        " :depth: 7\n",
        " :start: 1\n\n",
    ]
    for path in Path(root).select_file(filters):
        print("processing: %s ..." % path)
        # deeper directories get smaller headers
        header_value = len(path.parts) - n_parts
        lines = sorted_content(read_striped_lines(path.abspath))
        lines[1] = header_char_mapper[header_value] * 79
        blocks.append("\n".join(lines) + "\n\n\n")

    with open(readme_file, "wb") as f:
        f.write("".join(blocks).encode("utf-8"))
def from_json(cls, path):
    """
    Load a TagConfig from a json file.

    :param path: path of the json config file.
    :rtype: TagConfig
    :raises FileNotFoundError: if ``path`` does not exist.
    """
    # FileNotFoundError is a subclass of OSError (== EnvironmentError),
    # so callers catching the old EnvironmentError still work, while the
    # exception type is now precise.
    if not Path(path).exists():
        raise FileNotFoundError(f"{path} doesn't exist!")
    return cls(**json_load(path))
def path_obj(self):
    """
    The ``Path`` object wrapping ``self.path``.

    API: https://pathlib-mate.readthedocs.io/
    """
    return Path(self.path)
class SendLabel(Action):
    """
    A ``<SendLabel ...>`` block: route ``actions`` to the windows whose
    labels are listed in ``to``.
    """
    name = attr.ib(validator=attr.validators.instance_of(str))
    to = attr.ib(factory=list)  # type: typing.List[str]
    actions = attr.ib(factory=list)  # type: typing.List[typing.Union[Action, str]]

    # template text is loaded once, at class-definition time
    _template = Path(TPL_DIR, "SendLabel.tpl").read_text(encoding="utf-8")

    @property
    def targets(self) -> str:
        """Comma-separated list of target labels."""
        return ", ".join(self.to)

    @property
    def title(self) -> str:
        return f"<SendLabel {self.targets}>"

    def dump(self) -> str:
        """Render this block; empty string when there is nothing to send."""
        if not self.to or not self.actions:
            return ""
        rendered = render_template(
            self._template,
            send_label=self,
            render_action=render_action,
        )
        return remove_empty_line(rendered)
class TestServiceFileAndDirPath(object):
    """
    Verify every derived build/deploy path and S3 uri exposed by ``Service``.

    NOTE(review): most path tests build expectations with ``SERVICE_NAME``
    while ``test_site_packages_dir`` uses ``SERVICE_NAME_SLUG`` — these
    fixtures presumably hold equal (or interchangeable) values; confirm in
    the test constants module.
    """
    # the Service under test, configured from module-level fixtures
    service = Service(
        service_name=SERVICE_NAME,
        service_version=SERVICE_VERSION,
        stage=STAGE,
        aws_account_id=AWS_ACCOUNT_ID,
        aws_account_alias=AWS_ACCOUNT_ALIAS,
        project_dir=Path(__file__).parent.parent,
        root_workspace_dir=HOME,
    )

    def test_build_lambda_dir(self):
        # <HOME>/<py_lbd_dirname>/<service>/build/lambda
        assert self.service.build_lambda_dir.abspath == Path(
            HOME, py_lbd_dirname, SERVICE_NAME, "build", "lambda"
        ).abspath

    def test_build_lambda_version_specified_dir(self):
        # version-scoped build dir: .../build/lambda/<version>
        assert self.service.build_lambda_version_specified_dir.abspath == Path(
            HOME, py_lbd_dirname, SERVICE_NAME, "build", "lambda", SERVICE_VERSION
        ).abspath

    def test_deploy_pkg_zip(self):
        assert self.service.deploy_pkg_zip.abspath == Path(
            HOME, py_lbd_dirname, SERVICE_NAME, "build", "lambda", SERVICE_VERSION, "deploy-pkg.zip"
        ).abspath

    def test_source_zip(self):
        assert self.service.source_zip.abspath == Path(
            HOME, py_lbd_dirname, SERVICE_NAME, "build", "lambda", SERVICE_VERSION, "source.zip"
        ).abspath

    def test_layer_zip(self):
        assert self.service.layer_zip.abspath == Path(
            HOME, py_lbd_dirname, SERVICE_NAME, "build", "lambda", SERVICE_VERSION, "layer.zip"
        ).abspath

    def test_site_packages_dir(self):
        # NOTE(review): uses SERVICE_NAME_SLUG while sibling tests use
        # SERVICE_NAME — confirm both resolve to the same directory name.
        assert self.service.site_packages_dir.abspath == Path(
            HOME, py_lbd_dirname, SERVICE_NAME_SLUG, "build", "lambda", SERVICE_VERSION, "site-packages"
        ).abspath

    def test_s3_bucket_name(self):
        # bucket name pattern: <alias>-<slug>-<stage>-deploy
        assert self.service.s3_bucket_name == "{}-{}-{}-deploy".format(
            AWS_ACCOUNT_ALIAS,
            SERVICE_NAME_SLUG,
            STAGE,
        )

    def test_deploy_pkg_source_layer_uri(self):
        # NOTE(review): bucket_name is built with SERVICE_NAME here, not
        # SERVICE_NAME_SLUG as in test_s3_bucket_name — confirm equal.
        bucket_name = "{}-{}-{}-deploy".format(
            AWS_ACCOUNT_ALIAS,
            SERVICE_NAME,
            STAGE,
        )
        assert self.service.deploy_pkg_s3_uri == "s3://{}/lambda/{}/{}/deploy-pkg.zip".format(
            bucket_name,
            SERVICE_NAME_SLUG,
            SERVICE_VERSION,
        )
        assert self.service.source_s3_uri == "s3://{}/lambda/{}/{}/source.zip".format(
            bucket_name,
            SERVICE_NAME_SLUG,
            SERVICE_VERSION,
        )
        assert self.service.layer_s3_uri == "s3://{}/lambda/{}/{}/layer.zip".format(
            bucket_name,
            SERVICE_NAME_SLUG,
            SERVICE_VERSION,
        )
class Hotkey:
    """
    A ``<Hotkey ...>`` block: binds ``key`` to a list of actions, and
    optionally registers itself on a parent ``Script`` at construction.
    """
    name: str = attr.ib()
    key: str = attr.ib()
    actions = attr.ib(factory=list)  # type: typing.List[typing.Union[Action, str]]
    script: 'Script' = attr.ib(default=None, validator=attr.validators.optional(attr.validators.instance_of(Script)))

    # template text is loaded once, at class-definition time
    _template = Path(TPL_DIR, "Hotkey.tpl").read_text(encoding="utf-8")

    def __attrs_post_init__(self):
        # auto-register this hotkey on the owning script, if one was given
        if self.script is not None:
            self.script.add_hotkey(self)

    @property
    def title(self) -> str:
        return f"<Hotkey {self.key}>"

    # lazily-built name -> SendLabel index over ``actions``
    _send_labels = None  # type: typing.Dict[str, SendLabel]

    def get_send_label_by_name(self, name) -> 'SendLabel':
        """
        Return the SendLabel action with the given name.
        Raises KeyError when no SendLabel action has that name.
        """
        if self._send_labels is None:
            self._send_labels = dict()
            for action in self.actions:
                if isinstance(action, SendLabel):
                    self._send_labels[action.name] = action
        return self._send_labels[name]

    def validate(self):
        """
        Raise ValueError when one label is targeted by two *different*
        SendLabel actions — each label may receive from only one SendLabel.
        """
        # don't allow duplicate label in SendLabel actions.
        for action in self.actions:
            if isinstance(action, SendLabel):
                for label in action.to:
                    for _action in self.actions:
                        if isinstance(_action, SendLabel) \
                                and (action.name != _action.name) \
                                and (label in _action.to):
                            raise ValueError(f"label {label} in {action} conflicts with {_action}")

    def dump(self, verbose=True) -> str:
        """
        Render this hotkey block. Returns "" when there is no action, or
        when the rendered template collapses to a single line (i.e. the
        template produced no action lines).
        """
        if verbose:
            print(f"dump Hotkey(name='{self.name}', key='{self.key}') ...")
        self.validate()
        if len(self.actions):
            content = remove_empty_line(render_template(
                self._template,
                hotkey=self,
                render_action=render_action,
            ))
            # a render with no newline means the template emitted only its
            # own header — nothing useful to keep
            if content.count("\n") == 0:
                if verbose:
                    print(" no action, skip")
                return ""
            else:
                return content
        else:
            if verbose:
                print(" no action, skip")
            return ""
def merge_true_table():
    """
    Merge every ``.csv`` true table next to this script into a single
    ``True Table.xlsx`` workbook, one sheet per csv file.
    """
    # context manager guarantees the workbook is closed/flushed even if
    # reading one of the csv files fails; it also replaces the explicit
    # ``writer.save()`` call, which was removed in pandas 2.0.
    with pd.ExcelWriter("True Table.xlsx") as writer:
        for p in Path(__file__).parent.select_by_ext(".csv"):
            df = pd.read_csv(p.abspath, index_col=0)
            # one sheet per file, named after the csv file name (no ext);
            # ``sheet_name`` must be a keyword in modern pandas
            df.to_excel(writer, sheet_name=p.fname, index=True)
def __attrs_post_init__(self):
    """
    Validate the tag directory right after attrs construction: it must
    contain a readable tag config file and a docker file.
    """
    config_path = Path(self.path, app_config.tag_config_file)
    try:
        self.config = TagConfig.from_json(config_path)
    except Exception as e:
        # wrap any load/parse failure in the domain error
        raise NotValidTagDirError(
            NotValidTagDirError.tpl_config_error.format(
                self.path,
                app_config.tag_config_file,
                str(e),
            ))
    if not Path(self.path, app_config.docker_file).exists():
        raise NotValidTagDirError("{} not found in {}".format(
            app_config.docker_file,
            self.path,
        ))
def make_gzip(this_file, filename):
    """
    Compress the ``.tsv`` file named ``filename`` (located in the same
    directory as the script ``this_file``) into a ``.tsv.gz`` archive.
    The archive is only created when it does not already exist.

    :type this_file: str
    :param this_file: full path of the calling python script
    :type filename: str
    :param filename: the ``.tsv`` file name; must end with ``.tsv``
    :raises ValueError: if ``filename`` doesn't end with ``.tsv``
    """
    if not filename.endswith(".tsv"):
        # FIX: raise with an explanatory message instead of a bare ValueError
        raise ValueError("filename must end with '.tsv', got %r" % filename)
    dst = Path(this_file).change(new_basename=filename + ".gz")
    if not dst.exists():
        src = Path(this_file).change(new_basename=filename)
        with open(src.abspath, "rb") as f:
            b = f.read()
        with gzip.open(dst.abspath, 'wb') as f:
            f.write(b)
def run(self):
    """
    Directive entry point: render the image-table rst for the directory
    given as the first argument (relative to the current source file) and
    parse it into docutils nodes.
    """
    node = nodes.Element()
    node.document = self.state.document
    source_file = self.state.document.current_source
    source_dir = Path(source_file).parent
    target_image_dir = Path(source_dir, self.arguments[0])
    columns = self.options.get("n_columns", 3)

    # missing image dir -> empty output rather than an error
    output_rst = ""
    if target_image_dir.exists():
        output_rst = derive_rst(
            current_dir=source_dir,
            image_dir=target_image_dir,
            n_columns=columns,
        )

    view_list = StringList(output_rst.splitlines(), source='')
    sphinx.util.nested_parse_with_titles(self.state, view_list, node)
    return node.children
def __attrs_post_init__(self):
    """
    Validate the repo directory right after attrs construction by loading
    its repo config file; any failure is wrapped in NotValidRepoDirError.
    """
    config_abspath = Path(self.path, app_config.repo_config_file).abspath
    try:
        self.config = RepoConfig.from_json(config_abspath)
    except Exception as e:
        raise NotValidRepoDirError(
            NotValidRepoDirError.tpl_config_error.format(
                self.path,
                app_config.repo_config_file,
                str(e),
            ))
def lines_stats(dir_path, file_filter):
    """
    Count files and total lines among the selected files under a directory.

    :return n_files: number of matching files
    :return n_lines: total number of lines across those files
    """
    selected = list(Path(dir_path).select_file(file_filter))
    n_files = len(selected)
    n_lines = sum(count_lines(p.abspath) for p in selected)
    return n_files, n_lines
def derive_toctree_rst(self, current_file):
    """
    Generate the rst content::

        .. toctree::
            args ...

            example.rst
            ...

    :param current_file: the rst file this directive lives in.
    :return: the fully rendered ``.. toctree::`` block as a string.
    """
    TAB = " " * 4
    lines = [".. toctree::"]

    # forward every recognized toctree option that was supplied
    for opt in TocTree.option_spec:
        value = self.options.get(opt)
        if value is not None:
            lines.append(
                "{indent}:{option}: {value}".format(
                    indent=TAB, option=opt, value=value,
                ).rstrip()
            )
    lines.append("")

    # manual entries may go ahead of the auto-discovered ones ...
    if self._opt_append_ahead in self.options:
        lines.extend(TAB + line for line in list(self.content))

    index_file = self.options.get(self._opt_index_file, self._opt_index_file_default)
    article_folder = ArticleFolder(
        index_file=index_file,
        dir_path=Path(current_file).parent.abspath,
    )
    for af in article_folder.sub_article_folders:
        lines.append(
            "{indent}{title} <{relpath}>".format(
                indent=TAB, title=af.title, relpath=af.rel_path,
            )
        )

    # ... or after them (the default)
    if self._opt_append_ahead not in self.options:
        lines.extend(TAB + line for line in list(self.content))

    lines.append("")
    return "\n".join(lines)
def get_title(self):
    """
    Extract the top-level title from the ``.rst`` file at ``self.rst_path``.

    The title is the line immediately above the first full header bar —
    a non-empty line made entirely of one of ``=-~+*#^``.  ``.. include::``
    directives are expanded (one level deep) before scanning, so an
    included title counts.

    :return: the stripped title string, or ``None`` when the document
        contains no header at all.
    """
    header_bar_char_list = "=-~+*#^"
    lines = list()
    for cursor_line in textfile.readlines(self.rst_path, strip="both", encoding="utf-8"):
        # inline one level of ``.. include::``
        if cursor_line.startswith(".. include::"):
            relative_path = cursor_line.split("::")[-1].strip()
            included_path = Path(
                Path(self.rst_path).parent.abspath, relative_path)
            if included_path.exists():
                cursor_line = included_path.read_text(encoding="utf-8")
        lines.append(cursor_line)
    rst_content = "\n".join(lines)

    cursor_previous_line = None
    for cursor_line in rst_content.split("\n"):
        for header_bar_char in header_bar_char_list:
            if cursor_line.startswith(header_bar_char):
                # the line must consist solely of the bar character, and the
                # line above it must be non-empty (that line is the title)
                flag_full_bar_char = cursor_line == header_bar_char * len(cursor_line)
                flag_line_length_greather_than_1 = len(cursor_line) >= 1
                flag_previous_line_not_empty = bool(cursor_previous_line)
                if flag_full_bar_char \
                        and flag_line_length_greather_than_1 \
                        and flag_previous_line_not_empty:
                    return cursor_previous_line.strip()
        cursor_previous_line = cursor_line
    # FIX: removed a dead ``msg = "Warning, ..."`` assignment that was
    # built but never used; behavior (returning None) is unchanged.
    return None
def get_config_dir(self):
    """
    Ask the user (via a Qt message box + directory dialog) for the Houdini
    config folder that holds ``houdini.env``.

    :return: the chosen directory path, or "" if the user cancelled.
    """
    self.intro_text = (
        "Seems like you are using this application for a first time. "
        "Please, specify the Houdini config folder where you have "
        "houdini.env"
        " file."
    )
    # FIX: ``os.name == "Darwin"`` was dead code — ``os.name`` is "posix"
    # on macOS, never "Darwin" (that is a ``platform.system()`` value).
    if os.name == "posix":
        get_path = Path().home().joinpath("Library/Preferences/houdini/")
    else:
        get_path = Path().home().joinpath("documents")
    self.intro = qt.QMessageBox()
    self.intro.setBaseSize(qtcore.QSize(360, 180))
    self.intro.setText("Choose Houdini config folder")
    self.intro.setInformativeText(self.intro_text)
    self.reaction = self.intro.exec_()
    # 1024 == QMessageBox.Ok
    if self.reaction == 1024:
        self.path_btn = qt.QFileDialog.getExistingDirectory(
            self, "Open Image", str(get_path))
    else:
        self.path_btn = ""
    return self.path_btn
def test_walk(self):
    """walk() yields sorted children and, with pkg_only=False, covers every .py file."""
    for _, _, sub_packages, sub_modules in self.pkg.walk():
        # children must come back in strictly ascending fullname order
        assert_is_strictly_ascending([pkg.fullname for pkg in sub_packages])
        assert_is_strictly_ascending([mod.fullname for mod in sub_modules])

    n_py_files = len(list(Path(self.pkg.path).select_by_ext(".py")))
    n_walked = len(list(self.pkg.walk(pkg_only=False)))
    assert n_py_files == n_walked
def test_convert_to_item(self):
    """convert_to_item maps a movie row dict onto an Alfred item."""
    row = dict(
        movie_id=1,
        title="The Title",
        description="The Description",
        genres="Act Bio",
    )
    item = self.setting1.convert_to_item(row)

    assert item.title == "The Title"
    assert item.subtitle == "The Description"
    # movie_id is stringified for arg / autocomplete
    assert item.arg == "1"
    assert item.autocomplete == "1 - The Title"
    assert item.icon == Path(ALFRED_FTS, "movie-icon.png").abspath