def test_upload(tmpdir):
    """Upload one asset by name, then the remaining assets by glob."""
    clear_github_release_and_tags()
    tag_name = "0.1.0"
    ghr.gh_release_create(REPO_NAME, tag_name, publish=True)

    dist_dir = tmpdir.ensure("dist", dir=True)
    for asset in ("asset_1", "asset_2", "asset_3"):
        _create_asset(dist_dir, asset)

    # Single-file upload: exactly one asset should appear.
    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, tag_name, "dist/asset_1")
    assert check_releases([{
        "tag_name": tag_name,
        "package_pattern": [(1, "asset_1")],
    }])

    # Glob upload: all three assets should now be attached.
    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, tag_name, "dist/asset_*")
    assert check_releases([{
        "tag_name": tag_name,
        "package_pattern": [(3, "asset_*")],
    }])
def test_upload(tmpdir):
    """Upload one asset, then a mixed list of glob and explicit paths.

    The second upload deliberately includes an already-uploaded file to
    exercise duplicate handling; the final asset count is still three.
    """
    clear_github_release_and_tags()
    tag_name = "0.1.0"
    ghr.gh_release_create(
        REPO_NAME, tag_name, publish=True
    )

    dist_dir = tmpdir.ensure("dist", dir=True)
    for asset in ("asset_1", "asset_2", "asset_3"):
        _create_asset(dist_dir, asset)

    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, tag_name, "dist/asset_1")
    assert check_releases([
        {"tag_name": tag_name,
         "package_pattern": [(1, "asset_1")]}
    ])

    with push_dir(tmpdir):
        ghr.gh_asset_upload(
            REPO_NAME, tag_name, ["dist/asset_*", "dist/asset_1"])
    assert check_releases([
        {"tag_name": tag_name,
         "package_pattern": [(3, "asset_*")]}
    ])
def _upload_release(release_tag, args):
    """Create the release ``release_tag`` and upload its packages.

    Expected attributes on ``args``:

    * ``repo_name``: "<org_name>/<repo_name>" string.
    * ``release_packages``: list of package paths (relative or absolute).
    * ``re_upload``: when True, delete existing assets with matching
      names first (via :py:func:`_delete_matching_packages`).
    * ``dry_run``: True or False.
    """
    assert release_tag is not None

    # Publish a regular (non-prerelease) release for this tag.
    gh_release_create(args.repo_name, release_tag,
                      publish=True, prerelease=False)

    # Optionally clear any previously uploaded assets with the same names.
    if args.re_upload:
        _delete_matching_packages(
            args.repo_name, release_tag, args.release_packages)

    gh_asset_upload(
        args.repo_name, release_tag, args.release_packages, args.dry_run)
    return True
def esgf_upload(starting_directory, build_list, name, upload_flag=False, prerelease_flag=False, dryrun=False): """Upload binaries to GitHub release as assets.""" if upload_flag is None: upload_flag = query_for_upload() if not upload_flag: return if prerelease_flag: print "Marking as prerelease" print "build list in upload:", build_list for repo in build_list: print "repo:", repo os.chdir(os.path.join(starting_directory, repo)) repo_handle = Repo(os.getcwd()) latest_tag = get_latest_tag(repo_handle) print "latest_tag:", latest_tag if not name: release_name = latest_tag else: release_name = name if latest_tag in get_releases("ESGF/{}".format(repo)): print "Updating the assets for the latest tag {}".format(latest_tag) gh_asset_upload("ESGF/{}".format(repo), latest_tag, "{}/{}/dist/*".format(starting_directory, repo), dry_run=dryrun, verbose=False) else: print "Creating release version {} for {}".format(latest_tag, repo) gh_release_create("ESGF/{}".format(repo), "{}".format(latest_tag), publish=True, name=release_name, prerelease=prerelease_flag, dry_run=dryrun, asset_pattern="{}/{}/dist/*".format(starting_directory, repo)) print "Upload completed!"
def esgf_upload(starting_directory, build_list, name, upload_flag=False, prerelease_flag=False, dryrun=False): """Upload binaries to GitHub release as assets.""" if upload_flag is None: upload_flag = query_for_upload() if not upload_flag: return if prerelease_flag: print "Marking as prerelease" print "build list in upload:", build_list for repo in build_list: print "repo:", repo os.chdir(os.path.join(starting_directory, repo)) repo_handle = Repo(os.getcwd()) print "active branch before upload:", repo_handle.active_branch latest_tag = get_latest_tag(repo_handle) print "latest_tag:", latest_tag if not name: release_name = latest_tag else: release_name = name published_releases = get_published_releases("ESGF/{}".format(repo)) if latest_tag in published_releases: if not prerelease_flag: print "removing prerelease label" gh_release_edit("ESGF/{}".format(repo), latest_tag, prerelease=False) if is_prerelease("ESGF/{}".format(repo), latest_tag): raise RuntimeError("Prerelease flag not removed") print "Updating the assets for the latest tag {}".format( latest_tag) gh_asset_upload("ESGF/{}".format(repo), latest_tag, "{}/{}/dist/*".format(starting_directory, repo), dry_run=dryrun, verbose=False) else: print "Creating release version {} for {}".format(latest_tag, repo) gh_release_create("ESGF/{}".format(repo), "{}".format(latest_tag), publish=True, name=release_name, prerelease=prerelease_flag, dry_run=dryrun, asset_pattern="{}/{}/dist/*".format( starting_directory, repo)) print "Upload completed!"
def _upload_prerelease(args):
    """Create/refresh the prerelease for ``args.prerelease_tag``.

    Uploads the configured packages, prunes obsolete assets, and
    optionally retargets the release commit.
    """
    repo = args.repo_name
    tag = args.prerelease_tag

    # Default name embeds the tag title and the current date.
    prerelease_name = args.prerelease_name
    if prerelease_name is None:
        prerelease_name = "%s (updated on %s)" % (
            tag.title(), get_current_date())

    gh_release_create(repo, tag,
                      name=prerelease_name, publish=True, prerelease=True)

    # Remove existing assets matching selected ones before re-uploading.
    if args.re_upload:
        _delete_matching_packages(repo, tag, args.prerelease_packages)

    gh_asset_upload(repo, tag, args.prerelease_packages, args.dry_run)

    # Prune assets matching the clear pattern (minus the keep pattern).
    if args.prerelease_packages_clear_pattern is not None:
        gh_asset_delete(repo, tag,
                        args.prerelease_packages_clear_pattern,
                        keep_pattern=args.prerelease_packages_keep_pattern,
                        dry_run=args.dry_run)

    # If needed, update target commit.
    sha = args.prerelease_sha
    if sha is not None:
        # A branch name may be given instead of a sha; resolve it.
        refs = get_refs(repo, pattern="refs/heads/%s" % sha)
        if refs:
            assert len(refs) == 1
            branch = sha
            sha = refs[0]["object"]["sha"]
            print("resolved '%s' to '%s'" % (branch, sha))
        gh_release_edit(repo, tag, target_commitish=sha)

    # If needed, update name associated with the release.
    gh_release_edit(repo, tag, name=prerelease_name)

    _cancel_additional_appveyor_builds(tag)
    return True
def sign_release(self, release, other_names, asc_names, is_newest_release):
    """Download/sign unsigned assets, upload .asc counterparts.

    Create SHA256SUMS.txt with all assets included and upload it with
    SHA256SUMS.txt.asc counterpart.
    """
    repo = self.repo
    tag = release.get('tag_name', None)
    if not tag:
        print('Release have no tag name, skip release\n')
        return

    with ChdirTemporaryDirectory() as tmpdir:
        with open(SHA_FNAME, 'w') as fdw:
            sdist_match = None
            for name in other_names:
                # The checksum file itself is regenerated, never hashed.
                if name == SHA_FNAME:
                    continue
                gh_asset_download(repo, tag, name)
                if not self.no_ppa:
                    # Remember the first sdist seen; used for PPA below.
                    sdist_match = (sdist_match
                                   or SDIST_NAME_PATTERN.match(name))
                asc_name = '%s.asc' % name
                if asc_name not in asc_names or self.force:
                    self.sign_file_name(name)
                    if self.force:
                        # Drop the stale signature before re-uploading.
                        gh_asset_delete(repo, tag, asc_name,
                                        dry_run=self.dry_run)
                    gh_asset_upload(repo, tag, asc_name,
                                    dry_run=self.dry_run)
                fdw.write('%s %s\n' % (sha256_checksum(name), name))

        # Sign the checksum file inline (not detached) and replace it.
        self.sign_file_name(SHA_FNAME, detach=False)
        gh_asset_delete(repo, tag, '%s.asc' % SHA_FNAME,
                        dry_run=self.dry_run)
        gh_asset_upload(repo, tag, '%s.asc' % SHA_FNAME,
                        dry_run=self.dry_run)

        if sdist_match and is_newest_release:
            self.make_ppa(sdist_match, tmpdir, tag)
def test_delete_keep_pattern(tmpdir):
    """Delete assets by pattern while protecting those matching keep_pattern."""
    clear_github_release_and_tags()
    tag_name = "1.0.0"
    ghr.gh_release_create(REPO_NAME, tag_name, publish=True)

    dist_dir = tmpdir.ensure("dist", dir=True)
    wheel_names = """
awesome-{tag_name}.dev1-cp27-cp27m-macosx_10_11_x86_64.whl
awesome-{tag_name}.dev1-cp27-cp27m-manylinux1_x86_64.whl
awesome-{tag_name}.dev1-cp27-cp27m-win_amd64.whl
awesome-{tag_name}.dev1-cp36-cp36m-macosx_10_11_x86_64.whl
awesome-{tag_name}.dev1-cp36-cp36m-manylinux1_x86_64.whl
awesome-{tag_name}.dev1-cp36-cp36m-win_amd64.whl
awesome-{tag_name}.dev2-cp27-cp27m-macosx_10_11_x86_64.whl
awesome-{tag_name}.dev2-cp27-cp27m-manylinux1_x86_64.whl
awesome-{tag_name}.dev2-cp27-cp27m-win_amd64.whl
awesome-{tag_name}.dev2-cp36-cp36m-macosx_10_11_x86_64.whl
awesome-{tag_name}.dev2-cp36-cp36m-manylinux1_x86_64.whl
awesome-{tag_name}.dev2-cp36-cp36m-win_amd64.whl
""".strip().format(tag_name=tag_name).splitlines()
    for asset_name in wheel_names:
        if not asset_name:
            continue
        _create_asset(dist_dir, asset_name)

    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, tag_name, "dist/*")
        assert check_releases([{
            "tag_name": tag_name,
            "package_pattern": [(12, "*")],
        }])

        # Delete the manylinux1 wheels, but keep every dev2 build.
        ghr.gh_asset_delete(REPO_NAME, tag_name,
                            "awesome*manylinux1*",
                            keep_pattern="awesome*dev2*")
        assert check_releases([{
            "tag_name": tag_name,
            "package_pattern": [
                (10, "*"),
                (2, "awesome-%s.dev1*macosx*" % tag_name),
                (2, "awesome-%s.dev1*win*" % tag_name),
                (6, "awesome-%s.dev2*" % tag_name),
            ],
        }])
def test_delete_keep_pattern(tmpdir):
    """Pattern-delete must spare assets matching ``keep_pattern``."""
    clear_github_release_and_tags()
    tag_name = "1.0.0"
    ghr.gh_release_create(
        REPO_NAME, tag_name, publish=True
    )

    dist_dir = tmpdir.ensure("dist", dir=True)
    for asset_name in """
awesome-{tag_name}.dev1-cp27-cp27m-macosx_10_11_x86_64.whl
awesome-{tag_name}.dev1-cp27-cp27m-manylinux1_x86_64.whl
awesome-{tag_name}.dev1-cp27-cp27m-win_amd64.whl
awesome-{tag_name}.dev1-cp36-cp36m-macosx_10_11_x86_64.whl
awesome-{tag_name}.dev1-cp36-cp36m-manylinux1_x86_64.whl
awesome-{tag_name}.dev1-cp36-cp36m-win_amd64.whl
awesome-{tag_name}.dev2-cp27-cp27m-macosx_10_11_x86_64.whl
awesome-{tag_name}.dev2-cp27-cp27m-manylinux1_x86_64.whl
awesome-{tag_name}.dev2-cp27-cp27m-win_amd64.whl
awesome-{tag_name}.dev2-cp36-cp36m-macosx_10_11_x86_64.whl
awesome-{tag_name}.dev2-cp36-cp36m-manylinux1_x86_64.whl
awesome-{tag_name}.dev2-cp36-cp36m-win_amd64.whl
""".strip().format(tag_name=tag_name).splitlines():
        if not asset_name:
            continue
        _create_asset(dist_dir, asset_name)

    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, tag_name, "dist/*")
        assert check_releases([
            {"tag_name": tag_name,
             "package_pattern": [(12, "*")]}
        ])

        # Remove manylinux1 wheels except the dev2 series.
        ghr.gh_asset_delete(REPO_NAME, tag_name,
                            "awesome*manylinux1*",
                            keep_pattern="awesome*dev2*")
        assert check_releases([
            {"tag_name": tag_name,
             "package_pattern": [
                 (10, "*"),
                 (2, "awesome-%s.dev1*macosx*" % tag_name),
                 (2, "awesome-%s.dev1*win*" % tag_name),
                 (6, "awesome-%s.dev2*" % tag_name),
             ]}
        ])
def do_release(release_tag):
    """Commit, publish a release for ``release_tag``, and upload its packages."""
    expected_packages = package_names(release_tag)
    pause("We will now add release %s with %s assets"
          % (release_tag, len(expected_packages)))

    # Create release (commit is pushed first so the tag has a target).
    do_commit(release_tag=release_tag, push=True)
    gh_release_create(REPO_NAME, release_tag, publish=True)

    # Generate packages
    generate_packages(release_tag)

    # Upload packages
    gh_asset_upload(REPO_NAME, release_tag, PACKAGE_DIR + "/*")
def upload_fw(file, version, codename, today):
    """Mirror a firmware archive to OSDN and publish it as a GitHub release.

    :param file: path of the firmware archive to upload.
    :param version: firmware version string, used in the release tag.
    :param codename: device codename; anything after the first '-' is dropped.
    :param today: date string appended to the release tag.
    """
    print("uploading: " + file)
    codename = codename.split('-')[0]
    f = set_folder(file)
    # Mirror the file to OSDN storage via rclone.
    subprocess.call([
        'rclone', 'copy', file,
        'osdn:/storage/groups/x/xi/xiaomifirmwareupdater/'
        + f + '/' + version + '/' + codename + '/', '-v'
    ])
    # BUGFIX: the tag was previously built from an undefined name `v`
    # ("{0}-{1}".format(v, today)); use the `version` parameter instead.
    repo = "XiaomiFirmwareUpdater/firmware_xiaomi_" + codename
    release_tag = "{0}-{1}".format(version, today)
    gh_release_create(repo, release_tag, publish=True, name=release_tag)
    gh_asset_upload(repo, release_tag, "fw_" + codename + "_*")
def _upload_release(release_tag, args):
    """Publish release ``release_tag`` and upload ``args.release_packages``.

    When ``args.re_upload`` is set, assets whose names match the selected
    packages are deleted before uploading.
    """
    assert release_tag is not None

    # Publish a regular (non-prerelease) release for this tag.
    gh_release_create(args.repo_name, release_tag,
                      publish=True, prerelease=False)

    if args.re_upload:
        # Remove existing assets matching selected ones.
        _delete_matching_packages(
            args.repo_name, release_tag, args.release_packages)

    gh_asset_upload(
        args.repo_name, release_tag, args.release_packages, args.dry_run)
    return True
def _download_test_prerequisites(tmpdir):
    """Create two releases, each seeded with three matching assets."""
    clear_github_release_and_tags()
    ghr.gh_release_create(REPO_NAME, "1.0.0", publish=True)
    ghr.gh_release_create(REPO_NAME, "2.0.0", publish=True)

    dist_dir = tmpdir.ensure("dist", dir=True)
    for prefix in ("asset_1", "asset_2"):
        for suffix in ("a", "boo", "bar"):
            _create_asset(dist_dir, "%s_%s" % (prefix, suffix))

    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, "1.0.0", "dist/asset_1_*")
        ghr.gh_asset_upload(REPO_NAME, "2.0.0", "dist/asset_2_*")
def _download_test_prerequisites(tmpdir):
    """Seed releases 1.0.0 and 2.0.0 with their respective assets."""
    clear_github_release_and_tags()
    ghr.gh_release_create(
        REPO_NAME, "1.0.0", publish=True
    )
    ghr.gh_release_create(
        REPO_NAME, "2.0.0", publish=True
    )

    dist_dir = tmpdir.ensure("dist", dir=True)
    _create_asset(dist_dir, "asset_1_a")
    _create_asset(dist_dir, "asset_1_boo")
    _create_asset(dist_dir, "asset_1_bar")
    _create_asset(dist_dir, "asset_2_a")
    _create_asset(dist_dir, "asset_2_boo")
    _create_asset(dist_dir, "asset_2_bar")

    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, "1.0.0", "dist/asset_1_*")
        ghr.gh_asset_upload(REPO_NAME, "2.0.0", "dist/asset_2_*")
def test_delete_simple(tmpdir):
    """Delete a single asset by exact name, then a group by glob."""
    clear_github_release_and_tags()
    tag_name = "0.1.0"
    ghr.gh_release_create(
        REPO_NAME, tag_name, publish=True
    )

    dist_dir = tmpdir.ensure("dist", dir=True)
    for idx in ("1", "2", "3"):
        _create_asset(dist_dir, "asset_%s_foo" % idx)
    for idx in ("1", "2", "3"):
        _create_asset(dist_dir, "asset_%s_bar" % idx)

    with push_dir(tmpdir):
        ghr.gh_asset_upload(REPO_NAME, tag_name, "dist/*")

        # Exact-name delete removes only asset_2_foo.
        ghr.gh_asset_delete(REPO_NAME, tag_name, "asset_2_foo")
        assert check_releases([
            {"tag_name": tag_name,
             "package_pattern": [
                 (1, "asset_1_foo"),
                 (1, "asset_3_foo"),
                 (3, "asset_*_bar"),
             ]}
        ])

        # Glob delete removes every remaining *_bar asset.
        ghr.gh_asset_delete(REPO_NAME, tag_name, "asset_*_bar")
        assert check_releases([
            {"tag_name": tag_name,
             "package_pattern": [
                 (1, "asset_1_foo"),
                 (1, "asset_3_foo"),
             ]}
        ])
def sign_release(self, release, other_names, asc_names, is_newest_release):
    """Download/sign unsigned assets, upload .asc counterparts.

    Create SHA256SUMS.txt with all assets included and upload it with
    SHA256SUMS.txt.asc counterpart.
    """
    repo = self.repo
    tag = release.get('tag_name', None)
    if not tag:
        print('Release have no tag name, skip release\n')
        return

    with ChdirTemporaryDirectory() as tmpdir:
        with open(SHA_FNAME, 'w') as fdw:
            sdist_match = None
            for name in other_names:
                # The checksum file itself is regenerated, never hashed.
                if name == SHA_FNAME:
                    continue
                gh_asset_download(repo, tag, name)
                if not self.no_ppa:
                    # Remember the first sdist seen; used for PPA below.
                    sdist_match = (sdist_match
                                   or SDIST_NAME_PATTERN.match(name))

                apk_match = UNSIGNED_APK_PATTERN.match(name)
                if apk_match:
                    # Replace the unsigned APK asset with a signed one;
                    # `name` now refers to the signed artifact.
                    unsigned_name = name
                    name = self.sign_apk(unsigned_name, apk_match.group(1))
                    gh_asset_upload(repo, tag, name, dry_run=self.dry_run)
                    gh_asset_delete(repo, tag, unsigned_name,
                                    dry_run=self.dry_run)

                asc_name = '%s.asc' % name
                if asc_name not in asc_names or self.force:
                    self.sign_file_name(name)
                    if self.force:
                        # Drop the stale signature before re-uploading.
                        gh_asset_delete(repo, tag, asc_name,
                                        dry_run=self.dry_run)
                    gh_asset_upload(repo, tag, asc_name,
                                    dry_run=self.dry_run)
                fdw.write('%s %s\n' % (sha256_checksum(name), name))

        # Sign the checksum file inline (not detached) and replace it.
        self.sign_file_name(SHA_FNAME, detach=False)
        gh_asset_delete(repo, tag, '%s.asc' % SHA_FNAME,
                        dry_run=self.dry_run)
        gh_asset_upload(repo, tag, '%s.asc' % SHA_FNAME,
                        dry_run=self.dry_run)

        if sdist_match and is_newest_release:
            self.make_ppa(sdist_match, tmpdir, tag)
def _upload_prerelease(args):
    """Create/refresh the prerelease ``args.prerelease_tag``.

    Uploads missing packages (or re-uploads all of them), prunes
    obsolete assets, optionally retargets the release commit, and
    finally toggles draft off so the release date is current.
    """
    repo = args.repo_name
    tag = args.prerelease_tag

    # Default name embeds the tag title and the current date.
    prerelease_name = args.prerelease_name
    if prerelease_name is None:
        prerelease_name = "%s (updated on %s)" % (tag.title(),
                                                  get_current_date())

    gh_release_create(repo, tag,
                      name=prerelease_name, publish=True, prerelease=True)

    packages = _collect_packages(args.prerelease_packages)

    if args.re_upload:
        # Remove existing assets matching selected ones.
        _delete_matching_packages(repo, tag, packages)
    else:
        # ... or skip upload of packages already present on the release.
        matching_packages = _get_matching_packages(repo, tag, packages)
        for matching_package in matching_packages:
            if matching_package in packages:
                print("skipping %s package "
                      "(already uploaded)" % matching_package)
                packages.remove(matching_package)
        if matching_packages:
            print("")

    gh_asset_upload(repo, tag, packages, args.dry_run)

    # Remove obsolete assets.
    if args.prerelease_packages_clear_pattern is not None:
        gh_asset_delete(repo, tag,
                        args.prerelease_packages_clear_pattern,
                        keep_pattern=args.prerelease_packages_keep_pattern,
                        dry_run=args.dry_run)

    # If needed, update target commit.
    sha = args.prerelease_sha
    if sha is not None:
        # A branch name may be given instead of a sha; resolve it.
        refs = get_refs(repo, pattern="refs/heads/%s" % sha)
        if refs:
            assert len(refs) == 1
            branch = sha
            sha = refs[0]["object"]["sha"]
            print("resolved '%s' to '%s'" % (branch, sha))
        # Check that sha exists.
        if gh_commit_get(repo, sha) is None:
            raise ValueError(
                "Failed to get commit associated with --prerelease-sha: %s"
                % sha)
        gh_release_edit(repo, tag, target_commitish=sha)

    # Set a draft first, and switch to prerelease afterward so that
    # the release date is current.
    gh_release_edit(repo, tag, draft=True)
    # Update draft, prerelease and name properties.
    gh_release_edit(repo, tag,
                    name=prerelease_name, draft=False, prerelease=True)

    _cancel_additional_appveyor_builds(tag)
    return True
def upload(repo_name, root_dir, incoming_dir, hashalgo, github_token=None):
    """Upload incoming files associated them with hashalgo release."""
    if github_token:
        github_release._github_token_cli_arg = github_token

    hashcmd = get_hashcmd(hashalgo)
    if not hashcmd:
        raise ValueError('hashalgo "' + hashalgo + '" not found')
    if not os.path.isdir(incoming_dir):
        raise ValueError("Missing " + incoming_dir + " directory")

    hashalgo_dir = os.path.join(root_dir, hashalgo)
    if not os.path.isdir(hashalgo_dir):
        os.mkdir(hashalgo_dir)

    # Download information about current release: fetch the existing
    # fileindex, or start a fresh one for a brand-new release.
    try:
        hashalgo_csv = download_fileindex_csv(
            repo_name, hashalgo_dir, hashalgo, github_token)
        fileindex = read_fileindex_csv(hashalgo_csv)
    except ValueError:
        # New release
        hashalgo_csv = os.path.join(hashalgo_dir, hashalgo + ".csv")
        fileindex = []

    # Collect hashes of successfully uploaded assets (skipped later) and
    # delete any partially uploaded ones.
    uploaded_assets = (github_release.get_assets(repo_name, hashalgo)
                       if fileindex else [])
    uploaded_hashes = []
    for asset in uploaded_assets:
        if asset["state"] == "uploaded":
            uploaded_hashes.append(asset["name"])
        else:
            github_release.gh_asset_delete(repo_name, hashalgo, asset["name"])

    # Register incoming files in the fileindex and stage their
    # hash-named copies in hashalgo_dir.
    filenames = [
        f for f in os.listdir(incoming_dir)
        if os.path.isfile(os.path.join(incoming_dir, f))
        and not f.startswith(".")
    ]
    for filename in filenames:
        filepath = os.path.join(incoming_dir, filename)
        checksum = hashcmd(filepath)
        try:
            fileindex.index([checksum, filename])
        except ValueError:
            # new item
            fileindex.append([checksum, filename])
        hashfilepath = os.path.join(hashalgo_dir, checksum)
        if not os.path.isfile(hashfilepath):
            copyfile(filepath, hashfilepath)

    # Rewrite index files (sorted case-insensitively by filename).
    fileindex.sort(key=lambda a: (a[1].casefold(), a[0]))
    write_fileindex_csv(hashalgo_csv, fileindex)
    # NOTE(review): joining root_dir with hashalgo_dir (which is already
    # root_dir-based) is redundant and wrong for a relative root_dir;
    # behavior preserved as-is — confirm intent.
    hashalgo_md = os.path.join(root_dir, hashalgo_dir, hashalgo + ".md")
    write_fileindex_md(hashalgo_md, fileindex, repo_name, hashalgo)

    # Create hashalgo release (in case it does not exist).
    github_release.gh_release_create(repo_name, hashalgo, publish=True)

    # Replace the index assets with the regenerated ones.
    github_release.gh_asset_delete(repo_name, hashalgo, hashalgo + ".csv")
    github_release.gh_asset_delete(repo_name, hashalgo, hashalgo + ".md")
    github_release.gh_asset_upload(repo_name, hashalgo, hashalgo_csv)
    github_release.gh_asset_upload(repo_name, hashalgo, hashalgo_md)

    # Upload data files not yet present on the release.
    for [checksum, filename] in fileindex:
        if checksum in uploaded_hashes:
            # already uploaded
            continue
        github_release.gh_asset_upload(
            repo_name, hashalgo, os.path.join(hashalgo_dir, checksum))

    # Copy md file content into release notes.
    with open(hashalgo_md, "r") as fdr:
        release_notes = fdr.read()
    github_release.gh_release_edit(repo_name, hashalgo, body=release_notes)