def _delete_matching_packages(repo_name, tag, packages):
    """Remove from the ``tag`` release of ``repo_name`` every asset whose
    file name matches one of the given ``packages``.

    ``packages`` may contain plain file paths or globbing expressions.
    """
    matching = _get_matching_packages(repo_name, tag, packages)
    for package_name in matching:
        # Translate the local file name into the name GitHub stores it under.
        remote_name = _github_asset_name(package_name)
        gh_asset_delete(repo_name, tag, remote_name)
def _upload_prerelease(args):
    """Create or refresh the prerelease identified by ``args.prerelease_tag``.

    Workflow: ensure the release exists, optionally delete assets that are
    about to be re-uploaded, upload the selected packages, prune obsolete
    assets, optionally retarget the release commit, and finally refresh the
    release name.

    Always returns True.
    """
    # Set default prerelease name when the caller did not provide one,
    # e.g. "Nightly (updated on YYYY-MM-DD)".
    prerelease_name = args.prerelease_name
    if prerelease_name is None:
        prerelease_name = "%s (updated on %s)" % (
            args.prerelease_tag.title(),
            get_current_date()
        )
    # Create release (presumably a no-op if it already exists --
    # confirm against gh_release_create implementation).
    gh_release_create(
        args.repo_name,
        args.prerelease_tag,
        name=prerelease_name,
        publish=True, prerelease=True
    )
    # Remove existing assets matching selected ones so the upload below
    # replaces them instead of being skipped/failing.
    if args.re_upload:
        _delete_matching_packages(
            args.repo_name, args.prerelease_tag, args.prerelease_packages)
    # Upload packages
    gh_asset_upload(
        args.repo_name, args.prerelease_tag, args.prerelease_packages,
        args.dry_run
    )
    # Remove obsolete assets, keeping those matching the keep pattern.
    if args.prerelease_packages_clear_pattern is not None:
        gh_asset_delete(
            args.repo_name,
            args.prerelease_tag,
            args.prerelease_packages_clear_pattern,
            keep_pattern=args.prerelease_packages_keep_pattern,
            dry_run=args.dry_run
        )
    # If needed, update target commit
    sha = args.prerelease_sha
    if sha is not None:
        # If a branch name was given instead of a sha, resolve it to the
        # commit currently at the head of that branch.
        refs = get_refs(args.repo_name, pattern="refs/heads/%s" % sha)
        if refs:
            assert len(refs) == 1
            branch = sha
            sha = refs[0]["object"]["sha"]
            print("resolved '%s' to '%s'" % (branch, sha))
        gh_release_edit(
            args.repo_name,
            args.prerelease_tag,
            target_commitish=sha
        )
    # Refresh the name: when auto-generated it embeds the current date.
    gh_release_edit(
        args.repo_name,
        args.prerelease_tag,
        name=prerelease_name
    )
    _cancel_additional_appveyor_builds(args.prerelease_tag)
    return True
def sign_release(self, release, other_names, asc_names, is_newest_release):
    """Download/sign unsigned assets, upload .asc counterparts.

    Create SHA256SUMS.txt with all assets included and upload it with
    SHA256SUMS.txt.asc counterpart.

    :param release: release info dict as returned by the GitHub API.
    :param other_names: names of non-signature assets in the release.
    :param asc_names: names of existing ``.asc`` signature assets.
    :param is_newest_release: when true, also trigger a PPA build from
        the sdist (unless ``self.no_ppa``).
    """
    repo = self.repo
    tag = release.get('tag_name', None)
    if not tag:
        # Cannot address assets without a tag name; skip this release.
        print('Release have no tag name, skip release\n')
        return
    # Work in a throwaway directory so downloaded assets and generated
    # files are cleaned up automatically.
    with ChdirTemporaryDirectory() as tmpdir:
        with open(SHA_FNAME, 'w') as fdw:
            sdist_match = None
            for name in other_names:
                if name == SHA_FNAME:
                    # The checksum file itself is regenerated below.
                    continue
                gh_asset_download(repo, tag, name)
                if not self.no_ppa:
                    # Remember the first asset that looks like an sdist;
                    # used for the PPA build at the end.
                    sdist_match = sdist_match \
                        or SDIST_NAME_PATTERN.match(name)
                # Sign only assets without an existing .asc, unless forced.
                if not '%s.asc' % name in asc_names or self.force:
                    self.sign_file_name(name)
                    if self.force:
                        # Old signature must be deleted before re-upload.
                        gh_asset_delete(repo, tag, '%s.asc' % name,
                                        dry_run=self.dry_run)
                    gh_asset_upload(repo, tag, '%s.asc' % name,
                                    dry_run=self.dry_run)
                # Record checksum of every asset (signed or not).
                sumline = '%s %s\n' % (sha256_checksum(name), name)
                fdw.write(sumline)
        # File is closed now; sign it (non-detached) and replace the
        # remote copy of its signature.
        self.sign_file_name(SHA_FNAME, detach=False)
        gh_asset_delete(repo, tag, '%s.asc' % SHA_FNAME,
                        dry_run=self.dry_run)
        gh_asset_upload(repo, tag, '%s.asc' % SHA_FNAME,
                        dry_run=self.dry_run)
        if sdist_match and is_newest_release:
            self.make_ppa(sdist_match, tmpdir, tag)
def test_delete_keep_pattern(tmpdir):
    """Deleting with ``keep_pattern`` must spare matching assets."""
    clear_github_release_and_tags()
    tag_name = "1.0.0"
    ghr.gh_release_create(REPO_NAME, tag_name, publish=True)
    dist_dir = tmpdir.ensure("dist", dir=True)
    # Two dev iterations times six wheel platform tags = 12 assets.
    wheel_tags = [
        "cp27-cp27m-macosx_10_11_x86_64",
        "cp27-cp27m-manylinux1_x86_64",
        "cp27-cp27m-win_amd64",
        "cp36-cp36m-macosx_10_11_x86_64",
        "cp36-cp36m-manylinux1_x86_64",
        "cp36-cp36m-win_amd64",
    ]
    for dev in ("dev1", "dev2"):
        for wheel_tag in wheel_tags:
            _create_asset(
                dist_dir, "awesome-%s.%s-%s.whl" % (tag_name, dev, wheel_tag))
    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, tag_name, "dist/*")
        assert (check_releases([
            {"tag_name": tag_name,
             "package_pattern": [
                 (12, "*"),
             ]}
        ]))
        # Delete manylinux wheels, but keep every dev2 asset.
        ghr.gh_asset_delete(REPO_NAME, tag_name,
                            "awesome*manylinux1*",
                            keep_pattern="awesome*dev2*")
        # Only the two dev1 manylinux wheels should be gone.
        assert (check_releases([
            {"tag_name": tag_name,
             "package_pattern": [
                 (10, "*"),
                 (2, "awesome-%s.dev1*macosx*" % tag_name),
                 (2, "awesome-%s.dev1*win*" % tag_name),
                 (6, "awesome-%s.dev2*" % tag_name),
             ]}
        ]))
def _delete_matching_packages(repo_name, tag, packages):
    """Delete assets of the ``tag`` release of ``repo_name`` whose names
    match the local ``packages`` (file paths or glob expressions).
    """
    release = get_release_info(repo_name, tag)
    remote_names = {asset['name'] for asset in release['assets']}
    for pattern in packages:
        for matched_path in glob.glob(pattern):
            # XXX Assets uploaded on GitHub always have the "plus" sign
            # in their name replaced by "dot".
            candidate = os.path.basename(matched_path).replace("+", ".")
            if candidate in remote_names:
                gh_asset_delete(repo_name, tag, candidate)
def test_delete_keep_pattern(tmpdir):
    """keep_pattern protects matching assets from gh_asset_delete."""
    clear_github_release_and_tags()
    tag_name = "1.0.0"
    ghr.gh_release_create(REPO_NAME, tag_name, publish=True)
    dist_dir = tmpdir.ensure("dist", dir=True)
    asset_names = """
    awesome-{tag_name}.dev1-cp27-cp27m-macosx_10_11_x86_64.whl
    awesome-{tag_name}.dev1-cp27-cp27m-manylinux1_x86_64.whl
    awesome-{tag_name}.dev1-cp27-cp27m-win_amd64.whl
    awesome-{tag_name}.dev1-cp36-cp36m-macosx_10_11_x86_64.whl
    awesome-{tag_name}.dev1-cp36-cp36m-manylinux1_x86_64.whl
    awesome-{tag_name}.dev1-cp36-cp36m-win_amd64.whl
    awesome-{tag_name}.dev2-cp27-cp27m-macosx_10_11_x86_64.whl
    awesome-{tag_name}.dev2-cp27-cp27m-manylinux1_x86_64.whl
    awesome-{tag_name}.dev2-cp27-cp27m-win_amd64.whl
    awesome-{tag_name}.dev2-cp36-cp36m-macosx_10_11_x86_64.whl
    awesome-{tag_name}.dev2-cp36-cp36m-manylinux1_x86_64.whl
    awesome-{tag_name}.dev2-cp36-cp36m-win_amd64.whl
    """.strip().format(tag_name=tag_name).splitlines()
    for asset_name in asset_names:
        if not asset_name:
            continue
        _create_asset(dist_dir, asset_name)
    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, tag_name, "dist/*")
        # All 12 wheels uploaded.
        expected = {"tag_name": tag_name, "package_pattern": [(12, "*")]}
        assert check_releases([expected])
        # Remove manylinux wheels except the dev2 ones.
        ghr.gh_asset_delete(REPO_NAME, tag_name,
                            "awesome*manylinux1*",
                            keep_pattern="awesome*dev2*")
        expected = {
            "tag_name": tag_name,
            "package_pattern": [
                (10, "*"),
                (2, "awesome-%s.dev1*macosx*" % tag_name),
                (2, "awesome-%s.dev1*win*" % tag_name),
                (6, "awesome-%s.dev2*" % tag_name),
            ]
        }
        assert check_releases([expected])
def test_delete_simple(tmpdir):
    """gh_asset_delete removes exact names and glob matches."""
    clear_github_release_and_tags()
    tag_name = "0.1.0"
    ghr.gh_release_create(REPO_NAME, tag_name, publish=True)
    dist_dir = tmpdir.ensure("dist", dir=True)
    _create_asset(dist_dir, "asset_1_foo")
    _create_asset(dist_dir, "asset_2_foo")
    _create_asset(dist_dir, "asset_3_foo")
    _create_asset(dist_dir, "asset_1_bar")
    _create_asset(dist_dir, "asset_2_bar")
    _create_asset(dist_dir, "asset_3_bar")
    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, tag_name, "dist/*")
        # Exact-name delete removes only asset_2_foo.
        ghr.gh_asset_delete(REPO_NAME, tag_name, "asset_2_foo")
        after_exact_delete = [{
            "tag_name": tag_name,
            "package_pattern": [
                (1, "asset_1_foo"),
                (1, "asset_3_foo"),
                (3, "asset_*_bar"),
            ]
        }]
        assert check_releases(after_exact_delete)
        # Glob delete removes every remaining bar asset.
        ghr.gh_asset_delete(REPO_NAME, tag_name, "asset_*_bar")
        after_glob_delete = [{
            "tag_name": tag_name,
            "package_pattern": [
                (1, "asset_1_foo"),
                (1, "asset_3_foo"),
            ]
        }]
        assert check_releases(after_glob_delete)
def test_delete_simple(tmpdir):
    """Delete assets by exact name, then by glob pattern."""
    clear_github_release_and_tags()
    tag_name = "0.1.0"
    ghr.gh_release_create(REPO_NAME, tag_name, publish=True)
    dist_dir = tmpdir.ensure("dist", dir=True)
    initial_assets = [
        "asset_1_foo",
        "asset_2_foo",
        "asset_3_foo",
        "asset_1_bar",
        "asset_2_bar",
        "asset_3_bar",
    ]
    for initial_asset in initial_assets:
        _create_asset(dist_dir, initial_asset)
    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, tag_name, "dist/*")
        # Removing by exact name leaves the other five assets in place.
        ghr.gh_asset_delete(REPO_NAME, tag_name, "asset_2_foo")
        assert check_releases([{
            "tag_name": tag_name,
            "package_pattern": [
                (1, "asset_1_foo"),
                (1, "asset_3_foo"),
                (3, "asset_*_bar"),
            ]
        }])
        # Removing by glob clears all remaining "bar" assets at once.
        ghr.gh_asset_delete(REPO_NAME, tag_name, "asset_*_bar")
        assert check_releases([{
            "tag_name": tag_name,
            "package_pattern": [
                (1, "asset_1_foo"),
                (1, "asset_3_foo"),
            ]
        }])
def sign_release(self, release, other_names, asc_names, is_newest_release):
    """Download/sign unsigned assets, upload .asc counterparts.

    Create SHA256SUMS.txt with all assets included and upload it with
    SHA256SUMS.txt.asc counterpart.

    Unsigned APKs (matching UNSIGNED_APK_PATTERN) are additionally run
    through APK signing: the signed APK replaces the unsigned asset on
    the release before the .asc handling happens.

    :param release: release info dict as returned by the GitHub API.
    :param other_names: names of non-signature assets in the release.
    :param asc_names: names of existing ``.asc`` signature assets.
    :param is_newest_release: when true, also trigger a PPA build from
        the sdist (unless ``self.no_ppa``).
    """
    repo = self.repo
    tag = release.get('tag_name', None)
    if not tag:
        # Cannot address assets without a tag name; skip this release.
        print('Release have no tag name, skip release\n')
        return
    # Work in a throwaway directory so downloaded assets and generated
    # files are cleaned up automatically.
    with ChdirTemporaryDirectory() as tmpdir:
        with open(SHA_FNAME, 'w') as fdw:
            sdist_match = None
            for name in other_names:
                if name == SHA_FNAME:
                    # The checksum file itself is regenerated below.
                    continue
                gh_asset_download(repo, tag, name)
                if not self.no_ppa:
                    # Remember the first asset that looks like an sdist;
                    # used for the PPA build at the end.
                    sdist_match = sdist_match \
                        or SDIST_NAME_PATTERN.match(name)
                apk_match = UNSIGNED_APK_PATTERN.match(name)
                if apk_match:
                    # Replace the unsigned APK asset with its signed
                    # counterpart; continue processing under the new name.
                    unsigned_name = name
                    name = self.sign_apk(unsigned_name, apk_match.group(1))
                    gh_asset_upload(repo, tag, name, dry_run=self.dry_run)
                    gh_asset_delete(repo, tag, unsigned_name,
                                    dry_run=self.dry_run)
                # Sign only assets without an existing .asc, unless forced.
                if not '%s.asc' % name in asc_names or self.force:
                    self.sign_file_name(name)
                    if self.force:
                        # Old signature must be deleted before re-upload.
                        gh_asset_delete(repo, tag, '%s.asc' % name,
                                        dry_run=self.dry_run)
                    gh_asset_upload(repo, tag, '%s.asc' % name,
                                    dry_run=self.dry_run)
                # Record checksum of every asset (signed or not).
                sumline = '%s %s\n' % (sha256_checksum(name), name)
                fdw.write(sumline)
        # File is closed now; sign it (non-detached) and replace the
        # remote copy of its signature.
        self.sign_file_name(SHA_FNAME, detach=False)
        gh_asset_delete(repo, tag, '%s.asc' % SHA_FNAME,
                        dry_run=self.dry_run)
        gh_asset_upload(repo, tag, '%s.asc' % SHA_FNAME,
                        dry_run=self.dry_run)
        if sdist_match and is_newest_release:
            self.make_ppa(sdist_match, tmpdir, tag)
def upload(repo_name, root_dir, incoming_dir, hashalgo, github_token=None):
    """Upload incoming files, associating them with the hashalgo release.

    Maintains a content-addressed GitHub release named after ``hashalgo``:
    each data asset is named by its checksum, and an index
    (``<hashalgo>.csv`` / ``<hashalgo>.md``) maps checksums back to
    original file names.

    :param repo_name: GitHub ``owner/repo`` string.
    :param root_dir: local cache root; a ``<hashalgo>`` sub-directory is
        created inside it if missing.
    :param incoming_dir: directory whose regular, non-hidden files are
        published.
    :param hashalgo: hash algorithm name, also used as the release tag.
    :param github_token: optional token forwarded to the github_release
        module.
    :raises ValueError: if ``hashalgo`` is unknown or ``incoming_dir``
        does not exist.
    """
    if github_token:
        github_release._github_token_cli_arg = github_token
    hashcmd = get_hashcmd(hashalgo)
    if not hashcmd:
        raise ValueError('hashalgo "' + hashalgo + '" not found')
    if not os.path.isdir(incoming_dir):
        raise ValueError("Missing " + incoming_dir + " directory")
    hashalgo_dir = os.path.join(root_dir, hashalgo)
    if not os.path.isdir(hashalgo_dir):
        os.mkdir(hashalgo_dir)

    # Download information about current release

    # Get current fileindex; a failure to download means this is a new
    # release starting from an empty index.
    try:
        hashalgo_csv = download_fileindex_csv(
            repo_name, hashalgo_dir, hashalgo, github_token)
        fileindex = read_fileindex_csv(hashalgo_csv)
    except ValueError:
        # New release
        hashalgo_csv = os.path.join(hashalgo_dir, hashalgo + ".csv")
        fileindex = []

    # Get list of successfully uploaded assets (to avoid uploading them
    # again) and delete partially uploaded ones.
    uploaded_assets = (github_release.get_assets(repo_name, hashalgo)
                       if fileindex else [])
    uploaded_hashes = []
    for asset in uploaded_assets:
        if asset["state"] == "uploaded":
            uploaded_hashes.append(asset["name"])
        else:
            # Remove partially uploaded asset so it can be re-uploaded.
            github_release.gh_asset_delete(repo_name, hashalgo, asset["name"])

    # Update release information with incoming data

    # Add incoming files to fileindex and hashalgo_dir
    filenames = [
        f for f in os.listdir(incoming_dir)
        if os.path.isfile(os.path.join(incoming_dir, f))
        and not f.startswith(".")
    ]
    for filename in filenames:
        filepath = os.path.join(incoming_dir, filename)
        checksum = hashcmd(filepath)
        if [checksum, filename] not in fileindex:
            # New (checksum, filename) pair.
            fileindex.append([checksum, filename])
        # Make sure the hash-named file is present in the local cache.
        hashfilepath = os.path.join(hashalgo_dir, checksum)
        if not os.path.isfile(hashfilepath):
            copyfile(filepath, hashfilepath)

    # Create new hashalgo.csv from existing and incoming files;
    # sorted case-insensitively by filename, then by checksum.
    fileindex.sort(key=lambda a: (a[1].casefold(), a[0]))
    write_fileindex_csv(hashalgo_csv, fileindex)
    # Fix: hashalgo_dir already contains root_dir, so joining root_dir a
    # second time produced a wrong path whenever root_dir was relative
    # (os.path.join(root_dir, hashalgo_dir) only collapses when
    # hashalgo_dir is absolute).
    hashalgo_md = os.path.join(hashalgo_dir, hashalgo + ".md")
    write_fileindex_md(hashalgo_md, fileindex, repo_name, hashalgo)

    # Upload updated release info and new data files

    # Create hashalgo release (in case it does not exist)
    github_release.gh_release_create(repo_name, hashalgo, publish=True)
    # Delete old hashalgo.csv and hashalgo.md before re-uploading.
    github_release.gh_asset_delete(repo_name, hashalgo, hashalgo + ".csv")
    github_release.gh_asset_delete(repo_name, hashalgo, hashalgo + ".md")
    # Upload new hashalgo.csv and hashalgo.md
    github_release.gh_asset_upload(repo_name, hashalgo, hashalgo_csv)
    github_release.gh_asset_upload(repo_name, hashalgo, hashalgo_md)
    # Upload new data files (skipping assets already fully uploaded).
    for [checksum, filename] in fileindex:
        if checksum in uploaded_hashes:
            # already uploaded
            continue
        filepath = os.path.join(hashalgo_dir, checksum)
        github_release.gh_asset_upload(repo_name, hashalgo, filepath)
    # Copy md file content into release notes
    with open(hashalgo_md, "r") as file:
        release_notes = file.read()
    github_release.gh_release_edit(repo_name, hashalgo, body=release_notes)
def _upload_prerelease(args):
    """Create or refresh the prerelease identified by ``args.prerelease_tag``.

    Workflow: ensure the release exists, collect the packages to publish,
    either delete assets being re-uploaded or skip packages already
    uploaded, upload the remainder, prune obsolete assets, optionally
    retarget the release commit, then toggle draft/prerelease so the
    release date is refreshed.

    Always returns True.

    :raises ValueError: when ``args.prerelease_sha`` does not resolve to
        an existing commit.
    """
    # Set default prerelease name when the caller did not provide one,
    # e.g. "Nightly (updated on YYYY-MM-DD)".
    prerelease_name = args.prerelease_name
    if prerelease_name is None:
        prerelease_name = "%s (updated on %s)" % (args.prerelease_tag.title(),
                                                  get_current_date())
    # Create release (presumably a no-op if it already exists --
    # confirm against gh_release_create implementation).
    gh_release_create(args.repo_name,
                      args.prerelease_tag,
                      name=prerelease_name,
                      publish=True, prerelease=True)
    # Expand the user-provided package arguments into concrete paths.
    packages = _collect_packages(args.prerelease_packages)
    # Remove existing assets matching selected ones
    if args.re_upload:
        _delete_matching_packages(args.repo_name, args.prerelease_tag,
                                  packages)
    else:  # or skip upload of existing packages
        matching_packages = _get_matching_packages(args.repo_name,
                                                   args.prerelease_tag,
                                                   packages)
        for matching_package in matching_packages:
            if matching_package in packages:
                print("skipping %s package "
                      "(already uploaded)" % matching_package)
                packages.remove(matching_package)
        if matching_packages:
            print("")
    # Upload packages
    gh_asset_upload(args.repo_name, args.prerelease_tag, packages,
                    args.dry_run)
    # Remove obsolete assets, keeping those matching the keep pattern.
    if args.prerelease_packages_clear_pattern is not None:
        gh_asset_delete(args.repo_name,
                        args.prerelease_tag,
                        args.prerelease_packages_clear_pattern,
                        keep_pattern=args.prerelease_packages_keep_pattern,
                        dry_run=args.dry_run)
    # If needed, update target commit
    sha = args.prerelease_sha
    if sha is not None:
        # If a branch name was given instead of a sha, resolve it to the
        # commit currently at the head of that branch.
        refs = get_refs(args.repo_name, pattern="refs/heads/%s" % sha)
        if refs:
            assert len(refs) == 1
            branch = sha
            sha = refs[0]["object"]["sha"]
            print("resolved '%s' to '%s'" % (branch, sha))
        # Check that sha exists
        if gh_commit_get(args.repo_name, sha) is None:
            raise ValueError(
                "Failed to get commit associated with --prerelease-sha: %s"
                % sha)
        gh_release_edit(args.repo_name,
                        args.prerelease_tag,
                        target_commitish=sha)
    # Set a draft first, and switch to prerelease afterward so that
    # the release date is current.
    gh_release_edit(
        args.repo_name,
        args.prerelease_tag,
        draft=True,
    )
    # Update draft, prerelease and name properties.
    gh_release_edit(args.repo_name,
                    args.prerelease_tag,
                    name=prerelease_name,
                    draft=False,
                    prerelease=True)
    _cancel_additional_appveyor_builds(args.prerelease_tag)
    return True