# NOTE(review): whitespace-mangled tail of a pack() function from the foo_spotify
# packaging script — the function header and the `with ZipFile(...)` that binds `z`
# are cut off before this chunk, so only comments are added here. Appears to: put the
# .pdb inside the component zip for debug builds, and pack it into a separate
# foo_spotify_pdb.zip for release builds; then the __main__ guard wires pack() through
# call_wrapper with a --debug flag. TODO: confirm against the full file.
if (is_debug): # Only debug package should have pdbs inside z.write(*path_basename_tuple(result_machine_dir / 'dbginfo' / 'foo_spotify.pdb')) print(f'Generated file: {component_zip}') if (not is_debug): # Release pdbs are packed in a separate package pdb_zip = output_dir / 'foo_spotify_pdb.zip' if (pdb_zip.exists()): pdb_zip.unlink() with ZipFile(pdb_zip, 'w', zipfile.ZIP_DEFLATED) as z: z.write(*path_basename_tuple(result_machine_dir / 'dbginfo' / 'foo_spotify.pdb')) print(f'Generated file: {pdb_zip}') if __name__ == '__main__': parser = argparse.ArgumentParser( description='Pack component to .fb2k-component') parser.add_argument('--debug', default=False, action='store_true') args = parser.parse_args() call_wrapper.final_call_decorator( 'Packing component', 'Packing component: success', 'Packing component: failure!')(pack)(args.debug)
import shutil
from pathlib import Path

import call_wrapper
import generate_project_filters


def prepare():
    """Copy the bundled .vcxproj/.vcxitems files to their target dirs and regenerate filters."""
    script_dir = Path(__file__).parent.absolute()
    repo_root = script_dir.parent

    project_entries = [
        ('mozjs_debug.vcxitems', repo_root / 'mozjs' / 'Debug'),
        ('mozjs_release.vcxitems', repo_root / 'mozjs' / 'Release'),
        ('scintilla.vcxproj', repo_root / 'submodules' / 'scintilla' / 'win32'),
        ('lexilla.vcxproj', repo_root / 'submodules' / 'scintilla' / 'lexilla' / 'src'),
        ('wtl.vcxitems', repo_root / 'submodules' / 'WTL'),
    ]
    for project_file, project_dir in project_entries:
        # Trailing '/' makes copy2 treat the destination as a directory.
        shutil.copy2(script_dir / 'additional_files' / project_file, str(project_dir) + '/')
        generate_project_filters.generate(project_dir / project_file)


if __name__ == '__main__':
    call_wrapper.final_call_decorator('Preparing project files',
                                      'Preparing: success',
                                      'Preparing: failure!')(prepare)()
# NOTE(review): whitespace-mangled tail of a setup() function (foo_spotify) — the
# chunk begins mid-call-expression, so only comments are added. Appears to invoke
# dynamically loaded generator scripts (commit-hash header, SourceLink config,
# 3rd-party notices) via call_decorator/load_module, then the __main__ guard parses
# --skip_submodules_* flags and runs setup() through call_wrapper. TODO: confirm
# against the full file.
'generate_commit_hash_header.py').generate_header_custom)( repo_dir=root_dir, output_dir=root_dir / '_result' / 'AllPlatforms' / 'generated', component_prefix='SPTF') call_decorator('SourceLink configuration file generation')(load_module( scripts_path / 'generate_source_link_config.py').generate_config_custom)( repo_dir=root_dir, output_dir=root_dir / '_result' / 'AllPlatforms' / 'generated', repo='theqwertiest/foo_spotify') call_decorator('3rd-party notices generation')(load_module( scripts_path / 'generate_third_party.py').generate)( root_dir=root_dir, component_name='Spotify Integration') if __name__ == '__main__': parser = argparse.ArgumentParser(description='Setup project') parser.add_argument('--skip_submodules_download', default=False, action='store_true') parser.add_argument('--skip_submodules_patches', default=False, action='store_true') args = parser.parse_args() call_wrapper.final_call_decorator( 'Preparing project repo', 'Setup complete!', 'Setup failed!')(setup)(args.skip_submodules_download, args.skip_submodules_patches)
def configure():
    """Trim Scintilla to the C++ lexer, run its generators, and install our project files.

    Relies on `subprocess`, `shutil`, `Path` and `call_wrapper` being imported
    earlier in the file.
    """
    script_dir = Path(__file__).parent.absolute()
    repo_root = script_dir.parent

    scintilla_dir = repo_root / "submodules" / "scintilla"
    assert (scintilla_dir.exists() and scintilla_dir.is_dir())

    # Delete every lexer except the C++ one so the regenerated projects stay minimal.
    for lexer_file in (scintilla_dir / "lexers").glob("*"):
        if lexer_file.name != "LexCPP.cxx":
            lexer_file.unlink()

    # Regenerate Scintilla/Lexilla sources from the pruned lexer set.
    subprocess.check_call("py LexGen.py", cwd=scintilla_dir / "scripts", shell=True)
    subprocess.check_call("py LexillaGen.py",
                          cwd=scintilla_dir / "lexilla" / "scripts",
                          shell=True)

    # Trailing '/' makes copy2 treat the destination as a directory.
    for project_file, target_dir in (
        ("lexilla.vcxproj", scintilla_dir / "lexilla" / "src"),
        ("scintilla.vcxproj", scintilla_dir / "win32"),
    ):
        shutil.copy2(script_dir / "additional_files" / project_file,
                     str(target_dir) + '/')


if __name__ == '__main__':
    call_wrapper.final_call_decorator(
        "Configuring Scintilla",
        "Configuring Scintilla: success",
        "Configuring Scintilla: failure!")(configure)()
# NOTE(review): whitespace-mangled tail of a generate() docs function — the function
# header (and the origins of cur_dir/root_dir/output_dir/jsdocs) are cut off, so only
# comments are added. Appears to regenerate the doc package info and shell out to
# `npx jsdoc` with the repo's conf/readme, then the __main__ guard parses --debug and
# runs generate() through call_wrapper. TODO: confirm against the full file.
conf = cur_dir / "doc" / "conf.json" readme = cur_dir / "doc" / "README.md" generate_package_info.generate() pack_json = root_dir / "_result" / "AllPlatforms" / "generated" / "doc_package_info.json" subprocess.check_call([ "npx", "jsdoc", "--configure", str(conf), "--readme", str(readme), "--destination", str(output_dir), "--package", str(pack_json), *jsdocs ], cwd=root_dir, shell=True) print(f"Generated docs: {output_dir}") if __name__ == '__main__': parser = argparse.ArgumentParser( description= 'Create HTML from JSDoc (requires `npm`, `jsdoc` and `tui-jsdoc-template`)' ) parser.add_argument('--debug', default=False, action='store_true') args = parser.parse_args() call_wrapper.final_call_decorator( "Generating HTML doc", "Generating HTML doc: success", "Generating HTML doc: failure!")(generate)(args.debug)
# NOTE(review): whitespace-mangled tail of a pack() function for a theme/layout
# bundle — the function header (and zipdir/output_zip/output_packages_dir bindings)
# are cut off, so only comments are added. Appears to zip the first package found in
# each packages/* dir, then assemble output_zip from packages, fonts and
# layout/theme.fcl; the __main__ guard runs pack() through call_wrapper (the parser
# is created but its args seem unused — TODO confirm). 
for d in (root_dir / 'packages').glob('*'): for f in d.glob('*'): package_zip = output_packages_dir / f'{f.name}.zip' with ZipFile(package_zip, 'w', compression=zipfile.ZIP_DEFLATED, compresslevel=9) as z: zipdir(z, f) print(f'Generated package: {package_zip}') # there should be only one package per dir anyway break with ZipFile(output_zip, 'w', compression=zipfile.ZIP_DEFLATED, compresslevel=9) as z: zipdir(z, output_packages_dir, 'packages') zipdir(z, root_dir / 'fonts', 'fonts') z.write(root_dir / 'layout' / 'theme.fcl', 'layout/theme.fcl') print(f'Generated file: {output_zip}') if __name__ == '__main__': parser = argparse.ArgumentParser(description='Pack') call_wrapper.final_call_decorator('Packing', 'Packing: success', 'Packing: failure!')(pack)()
def download_submodule(root_dir, submodule_name):
    """Initialize one submodule, retrying with a deeper clone and finally a full one.

    Relies on `subprocess`, `Path` and `call_wrapper` being imported earlier
    in the file.
    """
    print(f"Downloading {submodule_name}...")

    # Cheapest first: a shallow clone is usually enough to reach the pinned commit.
    try:
        subprocess.check_call(
            f"git submodule update --init --depth=10 -- submodules/{submodule_name}",
            cwd=root_dir, shell=True)
        return
    except subprocess.CalledProcessError:
        pass

    try:
        subprocess.check_call(
            f"git submodule update --init --depth=50 -- submodules/{submodule_name}",
            cwd=root_dir, shell=True)
        return
    except subprocess.CalledProcessError:
        pass

    # Shallow copy does not honour default branch config
    subprocess.check_call(
        "git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/*",
        cwd=root_dir / "submodules" / submodule_name, shell=True)
    subprocess.check_call(
        f"git submodule deinit --force -- submodules/{submodule_name}",
        cwd=root_dir, shell=True)
    subprocess.check_call(
        f"git submodule update --init --force -- submodules/{submodule_name}",
        cwd=root_dir, shell=True)


def download():
    """Sync, hard-reset and (re)download every submodule under submodules/."""
    script_dir = Path(__file__).parent.absolute()
    repo_root = script_dir.parent

    subprocess.check_call("git submodule sync", cwd=repo_root, shell=True)
    subprocess.check_call("git submodule foreach git reset --hard",
                          cwd=repo_root, shell=True)

    for entry in (repo_root / "submodules").iterdir():
        if entry.is_dir():
            download_submodule(repo_root, entry.name)


if __name__ == '__main__':
    call_wrapper.final_call_decorator(
        "Downloading submodules",
        "Downloading submodules: success",
        "Downloading submodules: failure!"
    )(download)()
# NOTE(review): whitespace-mangled tail of a generate_props_custom-style function —
# the function header and the bindings of output/version/commit_hash/output_file are
# cut off, so only comments are added. Appears to emit an MSBuild .props file with
# VersionPrefix (and VersionSuffix for prereleases); the __main__ guard requires
# --output_dir and runs generate_props_custom through call_wrapper (note: the
# computed output_dir local there appears unused — TODO confirm).
output.write(f"<!-- Generated by '{Path(__file__).name}'-->\n") output.write("<Project>\n") output.write(" <PropertyGroup>\n") output.write(f" <VersionPrefix>{version.major}.{version.minor}.{version.patch}</VersionPrefix>\n") if version.prerelease: output.write(f" <VersionSuffix>{version.prerelease}+{commit_hash}</VersionSuffix>\n") output.write(" </PropertyGroup>\n") output.write("</Project>\n") print(f"Generated file: {output_file}") if __name__ == '__main__': repo_dir = get_cwd_repo_root() output_dir = repo_dir/"_result"/"AllPlatforms"/"generated" parser = argparse.ArgumentParser(description='Generate props file with version') parser.add_argument('--output_dir', required=True) args = parser.parse_args() call_wrapper.final_call_decorator( "Generating props file with version", "Props file was successfully generated!", "Props file generation failed!" )( generate_props_custom )( repo_dir, args.output_dir )
# NOTE(review): whitespace-mangled tail of an update()-changelog function — the chunk
# begins mid-regex-argument (presumably inside re.search), so only comments are
# added. Appears to rewrite the `[unreleased]` compare link to point at the new
# version and insert a new version-compare link, then write the changelog back; the
# __main__ guard takes --root_dir and a positional release_version. TODO: confirm
# against the full file.
r'^\[unreleased\]: ' + repo.replace('.', '\.') + r'/compare/v(.*)\.\.\.HEAD$', changelog, re.MULTILINE) old_version = it.group(1) unreleased_link = it.group(0).replace(old_version, new_version) changelog = ( changelog[:it.start()] + unreleased_link + '\n' + f'[{new_version}]: {repo}/compare/v{old_version}...v{new_version}' + changelog[it.end():]) with Path(changelog_path).open('w') as f: f.write(changelog) if not silent: print(f"Updated changelog: {changelog_path}") return current_changelog if __name__ == '__main__': parser = argparse.ArgumentParser( description='Update changelog with new version') parser.add_argument('--root_dir', default=Path(os.getcwd()).absolute()) parser.add_argument('release_version', type=str, help='release version') args = parser.parse_args() call_wrapper.final_call_decorator( "Updating changelog", "Updating changelog: success!", "Updating changelog: failed!")(update)(args.root_dir, args.release_version)
#!/usr/bin/env python3

import shutil
from pathlib import Path

import call_wrapper


def configure():
    """Install the bundled discord-rpc.vcxproj into the discord-rpc submodule."""
    script_dir = Path(__file__).parent.absolute()
    repo_root = script_dir.parent

    discord_dir = repo_root / "submodules" / "discord-rpc"
    assert (discord_dir.exists() and discord_dir.is_dir())

    # Trailing '/' makes copy2 treat the destination as a directory.
    shutil.copy2(script_dir / "additional_files" / "discord-rpc.vcxproj",
                 str(discord_dir / "src") + '/')


if __name__ == '__main__':
    call_wrapper.final_call_decorator(
        "Configuring Discord RPC",
        "Configuring Discord RPC: success",
        "Configuring Discord RPC: failure!"
    )(configure)()
# NOTE(review): whitespace-mangled chunk; it opens with the `finally:` tail of a
# function whose header is cut off (temp-dir cleanup), so only comments are added.
# The visible publish() appears to: resolve the repo root, validate release_version
# via semver parsing, update the changelog, commit/push, and create the GitHub
# release with assets; the __main__ guard wires it through call_wrapper. TODO:
# confirm against the full file.
finally: if temp_dir: shutil.rmtree(temp_dir) def publish(repo_dir: PathLike, release_version, release_assets): if not repo_dir: repo_dir = get_cwd_repo_root() if not release_version: raise ValueError('`release_version` is empty') parsed_ver = semver.VersionInfo.parse(release_version) changelog = git_add_new_changelog(repo_dir, release_version) git_commit_and_push_new_files(repo_dir, release_version) git_publish_release(repo_dir, release_version, changelog, release_assets) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Publish GitHub release') parser.add_argument('--repo_dir', default=get_cwd_repo_root()) parser.add_argument('--release_assets', default=[], nargs='+') parser.add_argument('release_version', type=str, help='release version') args = parser.parse_args() call_wrapper.final_call_decorator("Publishing", "Published successfully!", "Publishing failed!")(publish)( repo_dir=args.repo_dir, release_version=args.release_version, release_assets=args.release_assets)
# NOTE(review): whitespace-mangled tail of a setup() function (Spider Monkey Panel)
# — the chunk begins mid-keyword-arguments, so only comments are added. Appears to
# invoke dynamically loaded generator scripts (SourceLink config, 3rd-party notices),
# then the __main__ guard parses --skip_mozjs / --skip_submodules_* flags and runs
# setup() through call_wrapper. TODO: confirm against the full file.
repo_dir=root_dir, output_dir=root_dir / '_result' / 'AllPlatforms' / 'generated', component_prefix='SMP') call_decorator('SourceLink configuration file generation')(load_module( scripts_path / 'generate_source_link_config.py').generate_config_custom)( repo_dir=root_dir, output_dir=root_dir / '_result' / 'AllPlatforms' / 'generated', repo='theqwertiest/foo_spider_monkey_panel') call_decorator('3rd-party notices generation')(load_module( scripts_path / 'generate_third_party.py').generate)( root_dir=root_dir, component_name='Spider Monkey Panel') if __name__ == '__main__': parser = argparse.ArgumentParser(description='Setup project') parser.add_argument('--skip_mozjs', default=False, action='store_true') parser.add_argument('--skip_submodules_download', default=False, action='store_true') parser.add_argument('--skip_submodules_patches', default=False, action='store_true') args = parser.parse_args() call_wrapper.final_call_decorator( "Preparing project repo", "Setup complete!", "Setup failed!")(setup)(args.skip_mozjs, args.skip_submodules_download, args.skip_submodules_patches)
import zipfile
from pathlib import Path
from shutil import rmtree
from zipfile import ZipFile

import call_wrapper


def unpack():
    """Extract the single mozjs zip from submodules/mozjs into <repo root>/mozjs.

    Raises:
        AssertionError: if the archive dir is missing, does not contain exactly
            one ``*.zip``, or the file is not a valid zip archive.
    """
    cur_dir = Path(__file__).parent.absolute()
    root_dir = cur_dir.parent

    zip_dir = root_dir / "submodules" / "mozjs"
    assert (zip_dir.exists() and zip_dir.is_dir())

    # Start from a clean output dir so stale files never survive a re-unpack.
    output_dir = root_dir / "mozjs"
    if (output_dir.exists()):
        rmtree(output_dir)
    output_dir.mkdir(parents=True)

    zip_arcs = list(zip_dir.glob("*.zip"))
    assert (len(zip_arcs) == 1)
    zip_arc = Path(zip_arcs[0])
    assert (zipfile.is_zipfile(zip_arc))

    with ZipFile(zip_arc) as z:
        z.extractall(output_dir)


if __name__ == '__main__':
    call_wrapper.final_call_decorator("Unpacking mozjs",
                                      "Unpacking mozjs: success",
                                      "Unpacking mozjs: failure!")(unpack)()
#!/usr/bin/env python3

import subprocess
from pathlib import Path

import call_wrapper


def patch():
    """Apply every ``*.patch`` under scripts/patches to the repo root with one `git apply`."""
    script_dir = Path(__file__).parent.absolute()
    repo_root = script_dir.parent

    patches = [Path(p) for p in (script_dir / "patches").glob('*.patch')]
    for patch_file in patches:
        assert (patch_file.exists() and patch_file.is_file())

    # One invocation for the whole set: any rejected hunk fails the entire call.
    joined_paths = ' '.join(str(p) for p in patches)
    subprocess.check_call(f"git apply --ignore-whitespace {joined_paths}",
                          cwd=repo_root, shell=True)


if __name__ == '__main__':
    call_wrapper.final_call_decorator("Patching submodules",
                                      "Patching submodules: success",
                                      "Patching submodules: failure!")(patch)()
import call_wrapper


def generate():
    """Write doc_package_info.json (component name + version from `git describe`) for JsDoc.

    Relies on `subprocess`, `json` and `Path` being imported earlier in the file.
    """
    script_dir = Path(__file__).parent.absolute()
    repo_root = script_dir.parent

    output_dir = repo_root / "_result" / "AllPlatforms" / "generated"
    output_dir.mkdir(parents=True, exist_ok=True)
    output_file = output_dir / 'doc_package_info.json'
    output_file.unlink(missing_ok=True)

    tag = subprocess.check_output("git describe --tags",
                                  shell=True).decode('ascii').strip()
    if (tag.startswith('v')):
        # JsDoc template adds `v` to the version, hence the removal
        tag = tag[1:]

    with open(output_file, 'w') as output:
        json.dump({"name": "Spider Monkey Panel", "version": tag}, output)

    print(f"Generated file: {output_file}")


if __name__ == '__main__':
    call_wrapper.final_call_decorator(
        "Generating package info for docs",
        "Generating package info: success",
        "Generating package info: failure!")(generate)()
# NOTE(review): whitespace-mangled tail of a pack() function (foo_spider_monkey_panel)
# — the chunk begins with a dangling `else:` whose `if` is cut off, so only comments
# are added. Appears to pack release pdbs (component pdb + mozjs pdbs) into a
# separate foo_spider_monkey_panel_pdb.zip; the __main__ guard parses --debug and
# runs pack() through call_wrapper. TODO: confirm against the full file.
else: print("No docs found. Skipping...") print(f"Generated file: {component_zip}") if (not is_debug): # Release pdbs are packed in a separate package pdb_zip = output_dir / "foo_spider_monkey_panel_pdb.zip" if (pdb_zip.exists()): pdb_zip.unlink() with ZipFile(pdb_zip, "w", zipfile.ZIP_DEFLATED) as z: z.write(*path_basename_tuple(result_machine_dir / "dbginfo" / "foo_spider_monkey_panel.pdb")) for f in mozjs_machine_dir.glob("*.pdb"): z.write(*path_basename_tuple(f)) print(f"Generated file: {pdb_zip}") if __name__ == '__main__': parser = argparse.ArgumentParser( description='Pack component to .fb2k-component') parser.add_argument('--debug', default=False, action='store_true') args = parser.parse_args() call_wrapper.final_call_decorator( "Packing component", "Packing component: success", "Packing component: failure!")(pack)(args.debug)
# NOTE(review): whitespace-mangled tail of a close()-issues function — the chunk
# begins with the closing bracket of a cut-off list (presumably a `gh` command), so
# only comments are added. Appears to close each issue via the GitHub API (`gh api
# --method=PATCH`) and strip a label, logging each command; the __main__ guard takes
# --repo_dir and runs close() through call_wrapper. TODO: confirm against the full
# file (note `-Fstate=close` — the GitHub API expects `closed`; verify upstream).
] def get_cmd_close_issue(issue_number): return [ 'gh', 'api', f'/repos/:owner/:repo/issues/{str(issue_number)}', '--method=PATCH', '-Fstate=close', '--silent' ] for i in issues: cmd_close_issue = get_cmd_close_issue(i['number']) cmd_remove_label = get_cmd_remove_label(i['number']) print('> ' + ' '.join(cmd_close_issue)) subprocess.check_call(cmd_close_issue, env=os.environ, cwd=repo_dir) print('> ' + ' '.join(cmd_remove_label)) subprocess.check_call(cmd_remove_label, env=os.environ, cwd=repo_dir) print('All issues were closed') if __name__ == '__main__': parser = argparse.ArgumentParser(description='Close all fixed issues') parser.add_argument('--repo_dir', default=Path(os.getcwd()).absolute()) args = parser.parse_args() call_wrapper.final_call_decorator( "Closing issues", "Issues were successfully closed!", "Failed to close all issues!")(close)(args.repo_dir)
# NOTE(review): whitespace-mangled tail of an update()-GitHub-Pages function — the
# function header (and root_dir/ghp_gen_dir bindings) are cut off, so only comments
# are added. Appears to copy docs, generated HTML, CHANGELOG.md and the samples
# readme into the gh-pages working tree; the __main__ guard accepts --gh_pages_dir
# but passes the default `gh_pages_dir` to update() instead of `args.gh_pages_dir` —
# looks like a bug, TODO confirm against the full file.
shutil.copytree(root_dir / "component" / "docs", ghp_gen_dir / "docs", dirs_exist_ok=True) shutil.copytree(root_dir / "_result" / "html", ghp_gen_dir / "docs" / "html", dirs_exist_ok=True) ghp_inc_dir = gh_pages_dir / "_includes" ghp_inc_dir.mkdir(parents=True, exist_ok=True) shutil.copy2(root_dir / "CHANGELOG.md", gh_pages_dir / "_includes" / "CHANGELOG.md") shutil.copy2(root_dir / "component" / "samples" / "readme.md", gh_pages_dir / "_includes" / "samples_readme.md") if __name__ == '__main__': cur_dir = Path(__file__).parent.absolute() root_dir = cur_dir.parent gh_pages_dir = root_dir / "gh-pages" parser = argparse.ArgumentParser(description='Update GitHub Pages content') parser.add_argument('--gh_pages_dir', default=gh_pages_dir) args = parser.parse_args() call_wrapper.final_call_decorator( "Updating GitHub Pages content", "Updating GitHub Pages content: success", "Updating GitHub Pages content: failure!")(update)(gh_pages_dir)
# NOTE(review): whitespace-mangled tail of a generate()-third-party-notices function
# — the function header (and output_file/index/component_name bindings) are cut off,
# so only comments are added. Appears to write a Markdown notices file linking each
# dependency's license under licenses/<name>.txt (note the empty `output.write('')`,
# possibly a lost newline — TODO confirm); the __main__ guard parses --component_name
# but calls generate() without arguments — verify that is intended.
with open(output_file, 'w') as output: output.write( f'{component_name} uses third-party libraries or other resources that may\n' ) output.write( f'be distributed under licenses different than the {component_name} software.\n' ) output.write('') output.write('The linked notices are provided for information only.\n') output.write('\n') for (dep_name, license) in index: output.write( f'- [{dep_name} - {license}](licenses/{quote(dep_name)}.txt)\n' ) print(f'Generated file: {output_file}') if __name__ == '__main__': parser = argparse.ArgumentParser( description='Generate MD license file base on the folder with licenses' ) parser.add_argument('--component_name') args = parser.parse_args() call_wrapper.final_call_decorator( 'Generating MD license file', 'Generating MD license file: success', 'Generating MD license file: failure!')(generate)()
# NOTE(review): whitespace-mangled tail of a generate_config_custom-style function —
# the function header (and repo_dir/repo/commit_hash/output_file bindings) are cut
# off, so only comments are added. Appears to write a SourceLink JSON mapping local
# repo paths to raw.githubusercontent.com URLs at a given commit; the __main__ guard
# wires generate_config_custom through call_wrapper, passing the local repo_dir
# rather than a CLI override. TODO: confirm against the full file.
data = {} data['documents'] = { f'{PureWindowsPath(repo_dir)}*': f'https://raw.githubusercontent.com/{repo}/{commit_hash}/*' } with open(output_file, 'w') as output: json.dump(data, output) print(f"Generated file: {output_file}") if __name__ == '__main__': repo_dir = get_cwd_repo_root() output_dir = repo_dir / "_result" / "AllPlatforms" / "generated" parser = argparse.ArgumentParser( description='Generate source link configuration file') parser.add_argument('--output_dir', default=output_dir) parser.add_argument('--repo') parser.add_argument('--commit_hash', default="") args = parser.parse_args() call_wrapper.final_call_decorator( "Generating source link configuration file", "Source link configuration file was successfully generated!", "Source link configuration file failed!")(generate_config_custom)( repo_dir, args.output_dir, args.repo, args.commit_hash)
# NOTE(review): whitespace-mangled chunk; it opens inside a cut-off
# update_submodule() body (fetch-spec fixup + `--remote` update fallback), so only
# comments are added. The visible update() appears to sync submodule URLs and then
# update each directory under submodules/; the __main__ guard runs update() through
# call_wrapper. TODO: confirm against the full file.
# Shallow copy does not honour default branch config subprocess.check_call( "git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/*", cwd=submodule_path, shell=True) subprocess.check_call("git fetch --all", cwd=submodule_path, shell=True) subprocess.check_call( f"git submodule update --init --force --recursive --remote -- submodules/{submodule_name}", cwd=root_dir, shell=True) def update(): cur_dir = Path(__file__).parent.absolute() root_dir = cur_dir.parent subprocess.check_call("git submodule sync", cwd=root_dir, shell=True) for subdir in [ f for f in (root_dir / "submodules").iterdir() if f.is_dir() ]: update_submodule(root_dir, subdir.name) if __name__ == '__main__': call_wrapper.final_call_decorator( "Updating submodules", "Updating submodules: success", "Updating submodules: failure!")(update)()
# NOTE(review): whitespace-mangled tail of a generate()-project-filters function —
# the function header (and src/source_files/output/PROJECT_POSTFIX bindings) are cut
# off, so only comments are added. Appears to emit MSBuild .vcxproj.filters XML:
# Cl<Include|Compile> items with optional <Filter> children, then the distinct
# filter declarations; the __main__ guard takes --project_file and runs generate()
# through call_wrapper. TODO: confirm against the full file.
include_prefix = src['include_prefix'] filter = src['filter'] path = src['path'] path_prefix = src['path_prefix'] output.write( f' <Cl{include_prefix} Include="{path_prefix}{path}">\n' ) if filter: output.write(f' <Filter>{filter}</Filter>\n') output.write(f' </Cl{include_prefix}>\n') output.write(' </ItemGroup>\n') output.write(' <ItemGroup>\n') for i in {i['filter'] for i in source_files if i['filter']}: output.write(f' <Filter Include="{i}"/>\n') output.write(' </ItemGroup>\n') output.write(PROJECT_POSTFIX) print(f"Generated file: {output_file}") if __name__ == '__main__': parser = argparse.ArgumentParser( description='Generate project filter file') parser.add_argument('--project_file') args = parser.parse_args() call_wrapper.final_call_decorator( "Generating project filter file", "Generating: success", "Generating: failure!")(generate)(args.project_file)
#!/usr/bin/env python3

import subprocess
from pathlib import Path

import call_wrapper


def patch():
    """Apply the bundled mozjs.patch against the repository root."""
    script_dir = Path(__file__).parent.absolute()
    repo_root = script_dir.parent

    mozjs_patch = script_dir / "patches" / "mozjs.patch"
    assert (mozjs_patch.exists() and mozjs_patch.is_file())

    subprocess.check_call(f"git apply --ignore-whitespace {mozjs_patch}",
                          cwd=repo_root, shell=True)


if __name__ == '__main__':
    call_wrapper.final_call_decorator(
        "Patching mozjs",
        "Patching mozjs: success",
        "Patching mozjs: failure!"
    )(patch)()
# NOTE(review): whitespace-mangled chunk; it opens with the trailing print of a
# cut-off generate_header_custom() body, so only comments are added. The visible
# generate_header() appears to be a convenience wrapper that targets
# _result/AllPlatforms/generated; the __main__ guard accepts --output_dir and
# --commit_hash and runs generate_header_custom through call_wrapper. TODO: confirm
# against the full file.
print(f"Generated file: {output_file}") def generate_header(): cur_dir = Path(__file__).parent.absolute() root_dir = cur_dir.parent output_dir = root_dir/"_result"/"AllPlatforms"/"generated" generate_header_custom(output_dir) if __name__ == '__main__': cur_dir = Path(__file__).parent.absolute() root_dir = cur_dir.parent output_dir = root_dir/"_result"/"AllPlatforms"/"generated" parser = argparse.ArgumentParser(description='Generate header with commit hash definition') parser.add_argument('--output_dir', default=output_dir) parser.add_argument('--commit_hash', default="") args = parser.parse_args() call_wrapper.final_call_decorator( "Generating header with commit hash", "Header was successfully generated!", "Header generation failed!" )( generate_header_custom )( args.output_dir, args.commit_hash )
# NOTE(review): whitespace-mangled tail of a generate_header_custom-style function —
# the chunk begins mid-f-string-argument (presumably inside an output.write call for
# VERSION_MAJOR), so only comments are added. Appears to emit C preprocessor version
# macros (MINOR/PATCH and an optional PRERELEASE_TEXT, commented out for stable
# builds); the __main__ guard requires --component_prefix and runs
# generate_header_custom through call_wrapper. TODO: confirm against the full file.
f"#define {component_prefix}_VERSION_MINOR {version.minor}\n") output.write( f"#define {component_prefix}_VERSION_PATCH {version.patch}\n") if version.prerelease: output.write( f'#define {component_prefix}_VERSION_PRERELEASE_TEXT "{version.prerelease}"\n' ) else: output.write( f'// #define {component_prefix}_VERSION_PRERELEASE_TEXT "placeholder"\n' ) print(f"Generated file: {output_file}") if __name__ == '__main__': repo_dir = get_cwd_repo_root() output_dir = repo_dir / "_result" / "AllPlatforms" / "generated" parser = argparse.ArgumentParser( description='Generate header with version') parser.add_argument('--output_dir', default=output_dir) parser.add_argument('--component_prefix', required=True) args = parser.parse_args() call_wrapper.final_call_decorator( "Generating header with version", "Header was successfully generated!", "Header generation failed!")(generate_header_custom)( repo_dir, args.output_dir, args.component_prefix)