def get_encrpyted_path(original_path, surfix=default_surfix):
    """
    Derive the output path of the encrypted file / dir by appending
    ``surfix`` to the file name (stem), keeping extension and parent dir.

    Example:

    - file: ``${home}/test.txt`` -> ``${home}/test-encrypted.txt``
    - dir: ``${home}/Documents`` -> ``${home}/Documents-encrypted``
    """
    source = Path(original_path).absolute()
    # append the suffix to the name part only; extension is untouched
    encrypted = source.change(new_fname=source.fname + surfix)
    return encrypted.abspath
def fixcode(**kwargs):
    """
    Auto pep8 format all python files in the source code and ``tests`` dirs.

    :param kwargs: keyword arguments forwarded to ``Path.autopep8``.
    """
    def _autopep8_dir(dir_path, label):
        # best-effort: report and skip when the directory doesn't exist
        if dir_path.exists():
            print("%s locate at: '%s'." % (label, dir_path))
            print("Auto pep8 all python file ...")
            dir_path.autopep8(**kwargs)
        else:
            print("%s directory not found!" % label)

    # repository directory (this script lives at repo root)
    repo_dir = Path(__file__).parent.absolute()
    # source code directory, named after the package
    _autopep8_dir(Path(repo_dir, package.__name__), "Source code")
    # unittest code directory
    _autopep8_dir(Path(repo_dir, "tests"), "Unittest code")
    print("Complete!")
def get_decrpyted_path(encrypted_path, surfix=default_surfix):
    """
    Recover the original path of an encrypted file or dir by removing the
    LAST occurrence of ``surfix`` from the file name.

    Example:

    - file: ``${home}/test-encrypted.txt`` -> ``${home}/test.txt``
    - dir: ``${home}/Documents-encrypted`` -> ``${home}/Documents``
    """
    p = Path(encrypted_path).absolute()
    fname = p.fname
    # strip the last occurrence of the suffix (equivalent to the
    # reverse-string replace-once trick, but without reversing)
    pos = fname.rfind(surfix)
    if pos >= 0:
        new_fname = fname[:pos] + fname[pos + len(surfix):]
    else:
        new_fname = fname
    decrypted = p.change(new_fname=new_fname)
    return decrypted.abspath
def __attrs_post_init__(self):
    # Validate this directory as a proper "tag" dir right after attrs
    # construction: it must hold a parseable tag config file and a
    # Dockerfile; otherwise raise NotValidTagDirError.
    try:
        # NOTE(review): the sibling RepoDir validator passes ``.abspath``
        # to ``from_json``; here the Path object is passed directly --
        # confirm ``from_json`` accepts a Path.
        self.config = TagConfig.from_json(
            Path(self.path, app_config.tag_config_file))
    except Exception as e:
        # wrap any parse/IO failure into a domain-specific error with
        # path / file / original-cause details
        raise NotValidTagDirError(
            NotValidTagDirError.tpl_config_error.format(
                self.path,
                app_config.tag_config_file,
                str(e),
            ))
    if not Path(self.path, app_config.docker_file).exists():
        raise NotValidTagDirError("{} not found in {}".format(
            app_config.docker_file,
            self.path,
        ))
def make_gzip(this_file, filename):
    """
    Compress the ``.tsv`` file named ``filename`` into a ``.tsv.gz``
    archive in the same directory. The python script executing this must
    live in the same directory as the ``.tsv`` file. The operation is
    idempotent: an existing archive is left untouched.

    :type this_file: str
    :param this_file: full path of the current python script.

    :type filename: str
    :param filename: the ``.tsv`` file name (must end with ``.tsv``).

    :raises ValueError: if ``filename`` doesn't end with ``.tsv``.
    """
    if not filename.endswith(".tsv"):
        raise ValueError(
            "filename must end with '.tsv', got %r" % filename)
    src = Path(this_file).change(new_basename=filename)
    dst = Path(this_file).change(new_basename=filename + ".gz")
    # skip if the archive already exists
    if not dst.exists():
        with open(src.abspath, "rb") as f:
            b = f.read()
        with gzip.open(dst.abspath, 'wb') as f:
            f.write(b)
def run(self):
    """Build this directive's output: derive rst for the image dir and
    parse it into child nodes."""
    container = nodes.Element()
    container.document = self.state.document
    current_file = self.state.document.current_source
    current_dir = Path(current_file).parent
    image_dir = Path(current_dir, self.arguments[0])
    n_columns = self.options.get("n_columns", 3)
    # no image dir -> nothing to render
    output_rst = ""
    if image_dir.exists():
        output_rst = derive_rst(
            current_dir=current_dir,
            image_dir=image_dir,
            n_columns=n_columns,
        )
    view_list = StringList(output_rst.splitlines(), source='')
    sphinx.util.nested_parse_with_titles(self.state, view_list, container)
    return container.children
def __attrs_post_init__(self):
    # Validate the repo directory right after attrs construction: its
    # config file must exist and parse as JSON.
    try:
        self.config = RepoConfig.from_json(
            Path(self.path, app_config.repo_config_file).abspath)
    except Exception as e:
        # re-raise as a domain error carrying path / file / cause details
        raise NotValidRepoDirError(
            NotValidRepoDirError.tpl_config_error.format(
                self.path,
                app_config.repo_config_file,
                str(e),
            ))
def lines_stats(dir_path, file_filter):
    """Lines count of selected files under a directory.

    :return n_files: number of files
    :return n_lines: number of lines
    """
    selected = list(Path(dir_path).select_file(file_filter))
    total_lines = sum(count_lines(p.abspath) for p in selected)
    return len(selected), total_lines
def derive_toctree_rst(self, current_file):
    """
    Generate the rst content::

        .. toctree::
            args ...

            example.rst
            ...

    :param current_file:
    :return:
    """
    TAB = " " * 4
    lines = [".. toctree::"]
    # emit each configured toctree option as an indented ``:opt: value``
    for opt in TocTree.option_spec:
        value = self.options.get(opt)
        if value is not None:
            lines.append(
                "{indent}:{option}: {value}".format(
                    indent=TAB, option=opt, value=value).rstrip())
    lines.append("")
    # directive content may go ahead of the auto-discovered entries
    if self._opt_append_ahead in self.options:
        lines.extend(TAB + line for line in self.content)
    index_file = self.options.get(
        self._opt_index_file, self._opt_index_file_default)
    article_folder = ArticleFolder(
        index_file=index_file,
        dir_path=Path(current_file).parent.abspath,
    )
    # one ``title <relpath>`` entry per sub article folder
    for af in article_folder.sub_article_folders:
        lines.append(
            "{indent}{title} <{relpath}>".format(
                indent=TAB, title=af.title, relpath=af.rel_path))
    # ... otherwise the content trails the entries
    if self._opt_append_ahead not in self.options:
        lines.extend(TAB + line for line in self.content)
    lines.append("")
    return "\n".join(lines)
def get_title(self):
    """
    Get the top level title line from the .rst file.

    The title is the line right above the first full "header bar", i.e. a
    line made entirely of one of ``=-~+*#^`` (e.g. ``====`` / ``----``).
    ``.. include::`` directives are inlined first so a title living in an
    included file is also found.

    :return: the stripped title line, or ``None`` when no header exists.
    """
    header_bar_char_list = "=-~+*#^"
    # inline ``.. include::`` directives before scanning
    lines = list()
    for cursor_line in textfile.readlines(self.rst_path,
                                          strip="both",
                                          encoding="utf-8"):
        if cursor_line.startswith(".. include::"):
            relative_path = cursor_line.split("::")[-1].strip()
            included_path = Path(
                Path(self.rst_path).parent.abspath, relative_path)
            if included_path.exists():
                cursor_line = included_path.read_text(encoding="utf-8")
        lines.append(cursor_line)
    rst_content = "\n".join(lines)

    cursor_previous_line = None
    for cursor_line in rst_content.split("\n"):
        for header_bar_char in header_bar_char_list:
            if cursor_line.startswith(header_bar_char):
                # full header bar: every char is the same bar char
                # (startswith already guarantees a non-empty line, so the
                # old length >= 1 flag was redundant and has been removed)
                is_full_bar = (
                    cursor_line == header_bar_char * len(cursor_line))
                if is_full_bar and cursor_previous_line:
                    return cursor_previous_line.strip()
        cursor_previous_line = cursor_line
    # no header found (the old dead ``msg`` assignment has been removed)
    return None
def generate_terraform_script(tf_dir):
    """
    Look for ``main.tf.tpl``, ``variables.tf.tpl``, ``backend.tf.tpl``,
    ``output.tf.tpl`` files in ``tf_dir`` and use the jinja2 template
    engine to generate the real ``.tf`` files, injecting values from the
    global ``config`` object. Missing templates are silently skipped.

    :param tf_dir: terraform workspace directory.

    :raises TypeError: if ``tf_dir`` is not a directory.
    """
    tf_dir = Path(tf_dir)
    if not tf_dir.is_dir():
        raise TypeError("'%s' is not a directory!" % tf_dir)
    tf_files = ["main", "variables", "backend", "output"]
    for fname in tf_files:
        tpl_file = Path(tf_dir, fname + ".tf.tpl")
        tf_file = Path(tf_dir, fname + ".tf")
        # only render when the template exists
        if tpl_file.exists():
            tpl = jinja2.Template(tpl_file.read_text(encoding="utf-8"))
            content = tpl.render(config=config)
            tf_file.write_text(content, encoding="utf-8")
def test_convert_to_item(self):
    # convert_to_item should map a raw movie record (dict) into an Alfred
    # workflow item with title / subtitle / arg / autocomplete / icon.
    item = self.setting1.convert_to_item(
        dict(
            movie_id=1,
            title="The Title",
            description="The Description",
            genres="Act Bio",
        ))
    assert item.title == "The Title"
    assert item.subtitle == "The Description"
    # arg and autocomplete are derived from the stringified movie_id
    assert item.arg == "1"
    assert item.autocomplete == "1 - The Title"
    # every movie item uses the static icon under the workflow dir
    assert item.icon == Path(ALFRED_FTS, "movie-icon.png").abspath
def get_config_dir(self):
    # Ask the user (via Qt dialogs) to locate the Houdini config folder
    # that contains the ``houdini.env`` file. Returns the chosen
    # directory path, or "" when the user cancels.
    self.intro_text = "Seems like you are using this application for a first time. Please, specify the Houdini config folder where you have " "houdini.env" " file."
    # NOTE(review): ``os.name`` is never "Darwin" (macOS reports
    # "posix"), so the second comparison is dead code -- confirm intent.
    if (os.name == "posix" or os.name == "Darwin"):
        # macOS/Linux default Houdini preferences location
        get_path = Path().home().joinpath(
            "Library/Preferences/houdini/")  #Path("~").expanduser()
    else:
        # Windows default: the user's documents folder
        get_path = Path().home().joinpath(
            "documents")  #Path.expanduser("~") / "documents"
    self.intro = qt.QMessageBox()
    self.intro.setBaseSize(qtcore.QSize(360, 180))
    self.intro.setText("Choose Houdini config folder")
    self.intro.setInformativeText(self.intro_text)
    self.reaction = self.intro.exec_()
    # 1024 is presumably QMessageBox.Ok -- verify against the Qt docs
    if self.reaction == 1024:
        self.path_btn = qt.QFileDialog.getExistingDirectory(
            self, "Open Image", str(get_path))
    else:
        self.path_btn = ""
    return self.path_btn
def test_walk(self):
    # walk() must yield sub-packages and sub-modules in strictly
    # ascending fullname order at every level ...
    for _, _, sub_packages, sub_modules in self.pkg.walk():
        assert_is_strictly_ascending([
            i.fullname for i in sub_packages
        ])
        assert_is_strictly_ascending([
            i.fullname for i in sub_modules
        ])
    # ... and walk(pkg_only=False) must visit exactly one node per
    # .py file found on disk
    assert len(list(Path(self.pkg.path).select_by_ext(".py"))) == \
        len(list(self.pkg.walk(pkg_only=False)))
def __init__(self):
    # Load the packages-folder path from the saved json config; when no
    # config exists yet, keep prompting the user (via Qt) until a folder
    # containing ``houdini.env`` is chosen, then persist it.
    # NOTE(review): block structure reconstructed from a collapsed
    # one-line source -- confirm the final config write is intended to
    # run only on the first-time (no-config) path.
    self.__configuration_path = get_root_dir()
    self.__packages_path = Path()
    #loading of config
    if self.__configuration_path.exists():
        with open(self.__configuration_path.abspath, "r") as f:
            data = json.load(f)
            self.__packages_path = Path(data["cfg_path"])
    else:
        false_dir = True
        while false_dir:
            self.__packages_path = self.__request_dir__(
            )  ## getting that folder from user
            files_to_check = self.__packages_path.iterdir()
            # the chosen folder is valid only if it holds houdini.env
            for f in files_to_check:
                if f.basename == "houdini.env":
                    false_dir = False
            self.dingus_box = qt.QMessageBox()
            self.dingus_box.setBaseSize(qtcore.QSize(360, 160))
            if false_dir:
                self.dingus_box.setText("Bad Folder")
                self.dingus_box.setInformativeText(
                    "Hey, you are a dingus!\nThat ain't a folder I've asked for!"
                )
            else:
                self.dingus_box.setText("Done")
                self.dingus_box.setInformativeText(
                    "Config file has been created.\nIf you need to change the foder just delete hou_packager.json file."
                )
            self.dingus_box.exec_()
        # persist the accepted folder for subsequent launches
        with open(self.__configuration_path.abspath, "w") as f:
            new_dic = {}
            new_dic["cfg_path"] = self.__packages_path.abspath
            data = json.dumps(new_dic, indent=3)
            f.write(data)
def get_index(reset=True):
    """
    Open (or rebuild) the whoosh index at ``Config.index_dir``.

    :param reset: when True, wipe any existing index and create a fresh
        one; when False, reuse the existing index if present.
    :return: a whoosh index object.
    """
    if Path(Config.index_dir).exists():
        if reset:
            # best-effort wipe; narrowed from a bare ``except`` so that
            # KeyboardInterrupt/SystemExit are no longer swallowed
            try:
                shutil.rmtree(Config.index_dir)
            except OSError:
                pass
        else:
            return index.open_dir(Config.index_dir)
    os.mkdir(Config.index_dir)
    return index.create_in(dirname=Config.index_dir, schema=bookmark_schema)
def _download_db_file_if_not_exists(self):
    """
    Resolve the default db file path / download url from the
    simple-or-comprehensive flag, then download the db file when it is
    missing on disk.
    """
    if self.db_file_path is None:
        self.db_file_path = self._default_db_file_path_mapper[
            self.simple_or_comprehensive]
    if self.download_url is None:
        self.download_url = self._default_download_url_mapper[
            self.simple_or_comprehensive]
    if not Path(self.db_file_path).exists():
        # the two datasets differ only in progress-log granularity:
        # the comprehensive db is much bigger
        progress_sizes = {
            self.SimpleOrComprehensiveArgEnum.simple: 1024 * 1024,
            self.SimpleOrComprehensiveArgEnum.comprehensive: 50 * 1024 * 1024,
        }
        if self.simple_or_comprehensive in progress_sizes:
            download_db_file(
                db_file_path=self.db_file_path,
                download_url=self.download_url,
                chunk_size=1024 * 1024,
                progress_size=progress_sizes[self.simple_or_comprehensive],
            )
def __init__(self, simple_zipcode=True, db_file_dir=HOME_USZIPCODE):
    # Ensure the local data dir exists, download the requested sqlite db
    # on first use, then open a SQLAlchemy session against it.
    Path(db_file_dir).mkdir(exist_ok=True)
    if simple_zipcode:
        # lightweight db: smaller download, fewer columns
        if not is_simple_db_file_exists(db_file_dir):
            download_simple_db_file(db_file_dir)
        engine = connect_to_simple_zipcode_db(db_file_dir)
        self.zip_klass = SimpleZipcode
    else:  # pragma: no cover
        # comprehensive db with the full column set
        if not is_db_file_exists(db_file_dir):
            download_db_file(db_file_dir)
        engine = connect_to_zipcode_db(db_file_dir)
        self.zip_klass = Zipcode
    self.engine = engine
    self.ses = sessionmaker(bind=engine)()
def sub_article_folders(self):
    """
    Returns all valid ArticleFolder sitting inside of
    :attr:`ArticleFolder.dir_path`.

    Folders whose ``title`` cannot be resolved are silently skipped
    (best-effort discovery).
    """
    result = list()
    for p in Path.sort_by_fname(
            Path(self.dir_path).select_dir(recursive=False)):
        af = ArticleFolder(index_file=self.index_file, dir_path=p.abspath)
        try:
            # ``title`` may raise for folders without a valid index file
            if af.title is not None:
                result.append(af)
        except Exception:
            # narrowed from a bare ``except`` so KeyboardInterrupt /
            # SystemExit propagate
            pass
    return result
class Config(object):
    """App DB config, loaded from the ``config.json`` next to this module."""

    DB_HOST = None
    DB_PORT = None
    DB_DATABASE = None
    DB_USERNAME = None
    DB_PASSWORD = None

    # config.json lives in the same directory as this file
    RAW_CONFIG_FILE = Path(__file__).parent.change(new_basename="config.json")

    @classmethod
    def update_from_config(cls):
        # overwrite only the attributes that already exist on the class;
        # unknown keys in the json are ignored
        content = Path(cls.RAW_CONFIG_FILE).read_text(encoding="utf-8")
        for key, value in json.loads(content).items():
            if hasattr(cls, key):
                setattr(cls, key, value)
def test_to_file(self):
    # to_file must refuse to write until root_dir, rel_path and the
    # template are all set, and must not clobber an existing file
    # unless overwrite=True.
    can = CannedTier(PROJECT_NAME="my_project", STAGE="dev")
    with raises(Exception):  # no root_dir yet
        can.to_file()
    can.root_dir = Path(__file__).parent.abspath
    with raises(Exception):  # no rel_path yet
        can.to_file()
    can.rel_path = "./template.json"
    with raises(Exception):  # no template created yet
        can.to_file()
    can.create_template()
    # presumably the target file already exists here -- confirm fixture
    with raises(EnvironmentError):
        can.to_file()
    can.to_file(overwrite=True)
def read_compressed_tsv(this_file, filename):
    """
    Read a compressed ``.tsv.gz`` file sitting in the same directory as
    the current script into a ``pd.DataFrame``.

    :type this_file: str
    :param this_file: full path of the current python script.

    :type filename: str
    :param filename: the ``.tsv.gz`` file name.

    :rtype: pd.DataFrame

    :raises ValueError: if ``filename`` doesn't end with ``.tsv.gz``.
    """
    if not filename.endswith(".tsv.gz"):
        raise ValueError(
            "filename must end with '.tsv.gz', got %r" % filename)
    dst = Path(this_file).change(new_basename=filename)
    return pd.read_csv(
        dst.abspath,
        sep="\t",
        compression="gzip",
    )
def init_packages(folder):
    """
    Load all ``*.json`` package definitions under ``folder``.

    Creates ``folder`` (and returns an empty mapping) when it doesn't
    exist yet. Each json file becomes a ``Package`` keyed by its file
    name (without extension); a missing ``"enable"`` key defaults to True.

    :param folder: path-like object (``exists`` / ``mkdir`` / ``iterdir``).
    :return: dict mapping package name -> Package.
    """
    packages = {}
    if not folder.exists():
        folder.mkdir()
        return packages
    for f in folder.iterdir():
        path = Path(f)
        # only json files define packages; skip everything else
        if path.ext != ".json":
            continue
        with open(path.abspath, "r") as f_read:
            data = json.load(f_read)
        # packages are enabled unless the config says otherwise
        if "enable" not in data:
            data["enable"] = True
        packages[path.fname] = Package(path, path.fname, data)
    return packages
class Command:
    # attrs-declared fields: the command name, its ordered action list,
    # and the owning script (optional; auto-registers on construction).
    name: str = attr.ib(validator=attr.validators.instance_of(str))
    actions = attr.ib(factory=list)  # type: typing.List[typing.Union[Action, str]]
    script: 'Script' = attr.ib(default=None, validator=attr.validators.optional(attr.validators.instance_of(Script)))

    # class-level template text, read once at import time
    _template = Path(TPL_DIR, "Command.tpl").read_text(encoding="utf-8")

    def __attrs_post_init__(self):
        # register this command with its owning script, when given
        if self.script is not None:
            self.script.add_command(self)

    @property
    def title(self):
        # display name used in rendered output / logs
        return f"<Command {self.name}>"

    def call(self, *args) -> str:
        """
        Render the code snippet that invokes this Command, shaped like:

            <${CommandName} ${Arg1} ${Arg2} ...>

        :param args: literal argument tokens appended after the name.
        :return: the invocation snippet.
        """
        return "<{}{}>".format(
            self.name,
            " " + " ".join(args) if len(args) else ""
        )

    def dump(self, verbose=True) -> str:
        """
        Render the whole Command code block from the class template.

        :param verbose: when True, print a progress line.
        :return: the rendered block with empty lines removed.
        """
        if verbose:
            print(f"dump Command({self.name}) ...")
        return remove_empty_line(
            render_template(
                self._template,
                command=self,
                render_action=render_action,
            )
        )
class SendFocusWindow(Action):
    # attrs fields: label name and the list of actions to render inside
    # the SendFocusWin block.
    name = attr.ib(validator=attr.validators.instance_of(str))
    actions = attr.ib(factory=list)  # type: typing.List[typing.Union[Action, str]]

    # NOTE(review): reuses the SendLabel template -- presumably the two
    # constructs render identically apart from the tag; confirm.
    _template = Path(TPL_DIR, "SendLabel.tpl").read_text(encoding="utf-8")

    @property
    def title(self) -> str:
        return f"<SendFocusWin>"

    def dump(self) -> str:
        # render nothing when there are no actions to send
        if len(self.actions):
            return remove_empty_line(
                render_template(
                    self._template,
                    send_label=self,
                    render_action=render_action,
                )
            )
        else:
            return ""
def fixcode():
    """
    Auto pep8 format all python files in the source code and ``tests``
    directories of this repository.
    """
    def _autopep8_dir(dir_path, label):
        # best-effort: report and skip when the directory doesn't exist
        if dir_path.exists():
            print("%s locate at: '%s'." % (label, dir_path))
            print("Auto pep8 all python file ...")
            dir_path.autopep8()
        else:
            print("%s directory not found!" % label)

    # repository directory (this script lives one level below repo root)
    repo_dir = Path(__file__).absolute().parent.parent
    # package dir name convention: "<package>-project" -> "<package>"
    _autopep8_dir(
        Path(repo_dir, repo_dir.basename.replace("-project", "")),
        "Source code")
    _autopep8_dir(Path(repo_dir, "tests"), "Unittest code")
    print("Complete!")
def _apply(config_data, group_data, apply_data, dry_run, task_type,
           source_file_dir, wtf_attr):
    # For every task of ``task_type``: read the task's source file once
    # and copy its content into the corresponding WTF file of every
    # member character, de-duplicating identical target paths.
    wow_dir_path = config_data[constant.Syntax.WOW_DIR_PATH]
    tasks = apply_data[task_type]
    # guards against writing the same target file twice across tasks
    target_file_duplicate_filter = set()
    for task_data in tasks:
        members = evaluate_members(task_data=task_data,
                                   group_data=group_data)
        source_file = Path(source_file_dir,
                           task_data[constant.Syntax.FILE])
        source_file_content = source_file.read_text(encoding="utf-8")
        for member in members:
            # member id format: "<account>.<realm>.<char>"
            account, realm, char = member.split(".")
            wtf_char = wtf.WtfCharacter(wow_dir_path=wow_dir_path,
                                        account=account,
                                        realm=realm,
                                        char=char)
            # ``wtf_attr`` names which WTF file of the character to write
            target_file = getattr(wtf_char, wtf_attr)
            if target_file.abspath not in target_file_duplicate_filter:
                print(f"copy '{source_file}' ---> '{target_file.abspath}'")
                # dry_run only prints what would be copied
                if not dry_run:
                    target_file.atomic_write_text(source_file_content,
                                                  overwrite=True)
                target_file_duplicate_filter.add(target_file.abspath)
def get_tag_property_name_mapper():  # pragma: no cover
    """
    .. note::

        fetch all AWS Object that has ``.Tags`` property.

    Scans every top-level module of the installed ``troposphere``
    package, greps the source for ``class Xyz(AWSObject):`` declarations,
    and maps each resource type to the name of its tags property.

    :return: dict mapping ``resource_type`` -> tag property name.
    """
    TROPOSPHERE_NAME = "troposphere"
    pkg = picage.Package(TROPOSPHERE_NAME)
    TROPOSPHERE_DIR = pkg.path
    tag_property_name_mapper = dict()
    for p in Path(pkg.path).select_by_ext(".py", recursive=False):
        if p.fname == "__init__":
            continue
        # dynamodb2 is known to be problematic, skip it
        if p.fname == "dynamodb2":
            continue
        module_import_name = "{}.{}".format(
            TROPOSPHERE_NAME,
            str(p.relative_to(TROPOSPHERE_DIR)).replace("/", ".")[:-3])
        imported_module = importlib.import_module(module_import_name)
        # find out all subclass of AWSObject declared in this module
        for line in p.read_text().split("\n"):
            if "class" in line and ("(AWSObject):" in line):
                class_name = line.replace("class", "") \
                    .replace("(AWSObject):", "") \
                    .strip()
                aws_object_class = getattr(imported_module, class_name)
                tag_property_name = _get_tags_attr(aws_object_class)
                if tag_property_name is not None:
                    # best-effort: classes without ``resource_type`` are
                    # skipped (narrowed from a bare ``except`` so
                    # KeyboardInterrupt/SystemExit propagate)
                    try:
                        tag_property_name_mapper[
                            aws_object_class.resource_type
                        ] = tag_property_name
                    except Exception:
                        pass
    return tag_property_name_mapper
def download_db_file(
    db_file_path: str,
    download_url: str,
    chunk_size: int,
    progress_size: int,
):
    """
    Stream-download ``download_url`` to ``db_file_path``, creating parent
    dirs as needed and printing a progress line roughly every
    ``progress_size`` bytes. The write is atomic: the target file only
    appears once the download completes.

    :param db_file_path: local destination path.
    :param download_url: url of the db file.
    :param chunk_size: bytes requested per HTTP chunk.
    :param progress_size: progress-log granularity in bytes.
    """
    Path(db_file_path).parent.mkdir(parents=True, exist_ok=True)
    print(f"Download {db_file_path} from {download_url} ...")
    response = requests.get(download_url, stream=True)
    downloaded_size = 0
    next_log_threshold = progress_size
    with atomic_write(db_file_path, mode="wb", overwrite=True) as f:
        for chunk in response.iter_content(chunk_size):
            if not chunk:
                break
            f.write(chunk)
            # BUGFIX: count the bytes actually received -- the final
            # chunk is usually smaller than ``chunk_size``, so the old
            # ``+= chunk_size`` over-reported progress
            downloaded_size += len(chunk)
            if downloaded_size >= next_log_threshold:
                print(" {} downloaded ...".format(
                    repr_data_size(downloaded_size)))
                next_log_threshold += progress_size
    print(" Complete!")
def fly(self):
    """
    Generate doc tree.

    Removes any previously generated api dir under the sphinx source
    dir, then walks the package and writes one ``__init__.rst`` per
    package and one ``<module>.rst`` per module (ignored entries
    skipped).
    """
    # destination: the sphinx source dir (where conf file lives)
    dst_dir = Path(self.conf_file).parent.abspath
    package_dir = Path(dst_dir, self.package.shortname)
    # delete existing api document
    try:
        if package_dir.exists():
            shutil.rmtree(package_dir.abspath)
    except Exception as e:
        # best-effort: keep going even if the old tree can't be removed
        print("'%s' can't be removed! Error: %s" % (package_dir, e))
    # create .rst files
    for pkg, parent, sub_packages, sub_modules in self.package.walk():
        if not is_ignored(pkg, self.ignored_package):
            # mirror the package's dotted path as a directory path
            dir_path = Path(*([
                dst_dir,
            ] + pkg.fullname.split(".")))
            init_path = Path(dir_path, "__init__.rst")
            make_dir(dir_path.abspath)
            make_file(
                init_path.abspath,
                self.generate_package_content(pkg),
            )
            for mod in sub_modules:
                if not is_ignored(mod, self.ignored_package):
                    module_path = Path(dir_path, mod.shortname + ".rst")
                    make_file(
                        module_path.abspath,
                        self.generate_module_content(mod),
                    )
def get_testdata():
    """
    Download the test data html pages into ``testdata/``.

    Downloads are cached: a file that already exists on disk is skipped,
    so repeated runs only fetch what's missing.
    """
    def _fetch(url, filepath):
        # download once; reuse the cached copy on later runs
        if not filepath.exists():
            html = spider.get_html(url, encoding="utf-8")
            textfile.write(html, filepath.abspath)

    # home list pages, keyed by page id
    for page, state, county, zipcode, street in testdata:
        url = urlencoder.browse_home_listpage_url(state, county, zipcode,
                                                  street)
        _fetch(url, Path("testdata", "%s.html" % page))

    # individual house pages, keyed by the zillow id in the href
    for href in zillow_house_url_list:
        url = urlencoder.url_join(href)
        zid = href.split("/")[-2]
        _fetch(url, Path("testdata", "%s.html" % zid))
def get_html(url):
    # Fetch ``url`` with a browser-like header set, caching the html
    # next to this script in a file named by a fingerprint of the url.
    fpath = Path(__file__).change(
        new_basename="{}.html".format(fingerprint.of_text(url)))
    if fpath.exists():
        # cache hit: reuse the previously downloaded page
        html = fpath.read_text(encoding="utf-8")
    else:
        # i am lazy, I don't want to login, session_id is the key
        # SECURITY NOTE(review): the commented-out cookie below embeds a
        # real-looking session token -- never commit live credentials.
        headers = {
            "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36",
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
            "accept-encoding": "gzip, deflate, br",
            "accept-language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7,zh-TW;q=0.6,ja;q=0.5",
            "sec-fetch-mode": "navigate",
            "sec-fetch-site": "none",
            # "cookie": "csrftoken=9999JP9mz49NwmqfykyrMupzywy3XZNGgq7Rly23j0u2QuEHdDcOreAnWIjGIhtj; _ga=GA1.2.1853091755.1574187377; _gid=GA1.2.909819738.1574187377; intercom-id-aaaa73km=aaaa1111-18b1-48af-849d-94ad2564a3bc; ga_cid=1853091755.1574187377; sessionid=t2xtsqy6pkf3mkndvd8oljs102ffp6bc; intercom-session-aaaa73km=bXI5eG01b1pJdHlmSk9mYU1jSzZPNGVpWng0KzR6Snk3MngwUjJtNVRzWHhzSHlEenBqTXYyNDRwMWZaekxydC0tUmtTUVo1bjlaNmo3SDVIVFVhcGpCZz09--d927dd0ff0f890889144d645e77525943c851cf5"
        }
        res = requests.get(url, headers=headers)
        html = res.text
        # populate the cache for subsequent calls
        fpath.write_text(html, encoding="utf-8")
    return html
#!/usr/bin/env python # -*- coding: utf-8 -*- """ 1. Creates auto-generate doc for each module / class / method / variable. 2. Creates Table of Content for sub chapter if you follow this `Style Guide <http://www.wbh-doc.com.s3.amazonaws.com/docfly/02-sphinx-doc-style-guide/index.html>`_ """ import docfly from pathlib_mate import Path import loc as package source_dir = Path(__file__).absolute().parent.append_parts("source").abspath # --- Manually Made Doc --- # Comment this if you don't follow this style guide. # http://www.wbh-doc.com.s3.amazonaws.com/docfly/02-sphinx-doc-style-guide/index.html doc = docfly.DocTree(source_dir) doc.fly(table_of_content_header="Table of Content") # --- Api Reference Doc --- package_name = package.__name__ doc = docfly.ApiReferenceDoc(package_name, dst=source_dir, ignored_package=[ "%s.pkg" % package_name, "%s.zzz_ezinstall.py" % package_name, ]) doc.fly()
# -*- coding: utf-8 -*- import pytest from pathlib_mate import Path from rabbit_docker_cicd_pipeline.md5 import get_dockerfile_md5 HERE = Path(__file__).parent def test_get_dockerfile_md5(): v1 = get_dockerfile_md5(HERE.append_parts("Dockerfile1").abspath) v2 = get_dockerfile_md5(HERE.append_parts("Dockerfile2").abspath) v3 = get_dockerfile_md5(HERE.append_parts("Dockerfile3").abspath) v4 = get_dockerfile_md5(HERE.append_parts("Dockerfile4").abspath) assert v1 == v2 == v3 == v4 if __name__ == "__main__": import os basename = os.path.basename(__file__) pytest.main([basename, "-s", "--tb=native"])
# -*- coding: utf-8 -*- from __future__ import print_function, unicode_literals import base64 from pathlib_mate import Path from cryptography.fernet import Fernet from windtalker.cipher import BaseCipher from windtalker.exc import PasswordError from windtalker import fingerprint from windtalker import py23 if py23.is_py2: input = raw_input HOME_DIR = Path.home() WINDTALKER_CONFIG_FILE = Path(HOME_DIR, ".windtalker") def read_windtalker_password(): # pragma: no cover return WINDTALKER_CONFIG_FILE.read_text(encoding="utf-8").strip() class SymmetricCipher(Fernet, BaseCipher): """ A symmetric encryption algorithm utility class helps you easily encrypt/decrypt text, files and even a directory. :param password: The secret password you use to encrypt all your message. If you feel uncomfortable to put that in your code, you can leave it empty. The system will ask you manually enter that later.