def test_edit_tag_name(release_type):
    """Check that gh_release_edit() can rename a release's tag."""
    clear_github_release_and_tags()

    flags_by_type = {
        'draft': {"draft": True, "prerelease": False},
        'prerelease': {"draft": False, "prerelease": True},
        'release': {"draft": False, "prerelease": False},
    }
    flags = flags_by_type[release_type]

    # A release is published only when it is neither a draft nor a prerelease.
    should_publish = not (flags["prerelease"] or flags["draft"])

    # Create release
    ghr.gh_release_create(REPO_NAME, "0.1.0",
                          prerelease=flags["prerelease"],
                          publish=should_publish)

    expected = {
        "tag_name": "0.1.0",
        "draft": flags["draft"],
        "prerelease": flags["prerelease"],
    }
    assert check_releases([expected])

    # Rename the tag and verify the change is visible.
    ghr.gh_release_edit(REPO_NAME, "0.1.0", tag_name="0.1.0-edited")

    expected["tag_name"] = "0.1.0-edited"
    assert check_releases([expected])
Esempio n. 2
0
def esgf_upload(starting_directory,
                build_list,
                name,
                upload_flag=False,
                prerelease_flag=False,
                dryrun=False):
    """Upload binaries to GitHub release as assets.

    For each repository listed in *build_list*, either updates the assets
    of the GitHub release matching the repo's latest tag (when already
    published) or creates a new release and attaches the built assets
    found under ``<starting_directory>/<repo>/dist/``.

    :param starting_directory: root directory containing one checkout per repo
    :param build_list: iterable of repository directory names (under ESGF/)
    :param name: release title; falls back to the latest tag when falsy
    :param upload_flag: skip the upload entirely when falsy.
        NOTE(review): the ``is None`` check below only fires when the
        caller passes None explicitly — with the declared default of
        False the interactive query is never reached; confirm whether the
        default was meant to be None.
    :param prerelease_flag: mark newly created releases as prereleases
    :param dryrun: forwarded to the gh_* helpers (no remote changes made)
    """
    if upload_flag is None:
        upload_flag = query_for_upload()

    if not upload_flag:
        return

    if prerelease_flag:
        print "Marking as prerelease"

    print "build list in upload:", build_list
    for repo in build_list:
        print "repo:", repo
        # Work from the repo checkout so Repo() picks up the right git dir.
        os.chdir(os.path.join(starting_directory, repo))
        repo_handle = Repo(os.getcwd())
        print "active branch before upload:", repo_handle.active_branch
        latest_tag = get_latest_tag(repo_handle)
        print "latest_tag:", latest_tag

        # Release title defaults to the tag when no explicit name was given.
        if not name:
            release_name = latest_tag
        else:
            release_name = name

        published_releases = get_published_releases("ESGF/{}".format(repo))

        if latest_tag in published_releases:
            if not prerelease_flag:
                print "removing prerelease label"
                gh_release_edit("ESGF/{}".format(repo),
                                latest_tag,
                                prerelease=False)
                # Sanity check: the edit above must have cleared the flag.
                if is_prerelease("ESGF/{}".format(repo), latest_tag):
                    raise RuntimeError("Prerelease flag not removed")
            print "Updating the assets for the latest tag {}".format(
                latest_tag)
            gh_asset_upload("ESGF/{}".format(repo),
                            latest_tag,
                            "{}/{}/dist/*".format(starting_directory, repo),
                            dry_run=dryrun,
                            verbose=False)
        else:
            print "Creating release version {} for {}".format(latest_tag, repo)
            gh_release_create("ESGF/{}".format(repo),
                              "{}".format(latest_tag),
                              publish=True,
                              name=release_name,
                              prerelease=prerelease_flag,
                              dry_run=dryrun,
                              asset_pattern="{}/{}/dist/*".format(
                                  starting_directory, repo))

    print "Upload completed!"
def _upload_prerelease(args):
    """Create or refresh the prerelease identified by ``args.prerelease_tag``."""
    # Derive a default title when none was supplied.
    rel_name = args.prerelease_name
    if rel_name is None:
        rel_name = "%s (updated on %s)" % (args.prerelease_tag.title(),
                                           get_current_date())

    # Create the release (published, flagged as prerelease).
    gh_release_create(args.repo_name, args.prerelease_tag,
                      name=rel_name, publish=True, prerelease=True)

    # Drop remote copies of the selected packages before re-uploading.
    if args.re_upload:
        _delete_matching_packages(args.repo_name, args.prerelease_tag,
                                  args.prerelease_packages)

    # Upload the selected packages as assets.
    gh_asset_upload(args.repo_name, args.prerelease_tag,
                    args.prerelease_packages, args.dry_run)

    # Prune assets matching the clear pattern (minus the keep pattern).
    if args.prerelease_packages_clear_pattern is not None:
        gh_asset_delete(args.repo_name,
                        args.prerelease_tag,
                        args.prerelease_packages_clear_pattern,
                        keep_pattern=args.prerelease_packages_keep_pattern,
                        dry_run=args.dry_run)

    # If needed, retarget the release at a specific commit.
    sha = args.prerelease_sha
    if sha is not None:
        # A branch name is resolved to the commit it points at.
        head_refs = get_refs(args.repo_name, pattern="refs/heads/%s" % sha)
        if head_refs:
            assert len(head_refs) == 1
            branch = sha
            sha = head_refs[0]["object"]["sha"]
            print("resolved '%s' to '%s'" % (branch, sha))
        gh_release_edit(args.repo_name, args.prerelease_tag,
                        target_commitish=sha)

    # Refresh the display name of the release.
    gh_release_edit(args.repo_name, args.prerelease_tag, name=rel_name)

    _cancel_additional_appveyor_builds(args.prerelease_tag)

    return True
def test_edit_target_commitish(gh_src_dir, release_type):
    """Editing target_commitish should move the release tag to that commit."""
    type_flags = {
        'draft': {"draft": True, "prerelease": False},
        'prerelease': {"draft": False, "prerelease": True},
        'release': {"draft": False, "prerelease": False},
    }
    flags = type_flags[release_type]

    with push_dir(gh_src_dir):
        sha = do_commit()  # 2017-01-02
        do_commit(push=True)  # 2017-01-03

        # Create release
        ghr.gh_release_create(
            REPO_NAME, "0.1.0",
            prerelease=flags["prerelease"],
            publish=not (flags["prerelease"] or flags["draft"]))

        for fetch_cmd in ("git fetch origin", "git fetch origin --tags"):
            run(fetch_cmd)

        # Initially the tag points at the second (pushed) commit.
        assert check_releases([{
            "tag_name": "0.1.0",
            "draft": flags["draft"],
            "prerelease": flags["prerelease"],
            "tag_date": "20170103",
        }])

        # Retarget the release at the earlier commit.
        ghr.gh_release_edit(REPO_NAME, "0.1.0", target_commitish=sha)

        for fetch_cmd in ("git fetch origin", "git fetch origin --tags"):
            run(fetch_cmd)

        assert check_releases([{
            "tag_name": "0.1.0",
            "draft": flags["draft"],
            "prerelease": flags["prerelease"],
            "tag_date": "20170102",
        }])
def test_edit_release_type(from_release_type, to_release_type):
    """Switching a release between draft/prerelease/release via edit."""
    clear_github_release_and_tags()

    flag_table = {
        'draft': {"draft": True, "prerelease": False},
        'prerelease': {"draft": False, "prerelease": True},
        'release': {"draft": False, "prerelease": False},
    }

    if from_release_type == to_release_type:
        pytest.skip("from_release_type is identical to "
                    "to_release_type: %s" % to_release_type)

    src = flag_table[from_release_type]
    dst = flag_table[to_release_type]

    # Create the starting release.
    ghr.gh_release_create(
        REPO_NAME, "0.1.0",
        prerelease=src["prerelease"],
        publish=not (src["prerelease"] or src["draft"]))

    assert check_releases([{"tag_name": "0.1.0",
                            "draft": src["draft"],
                            "prerelease": src["prerelease"]}])

    # Convert to the target type and verify.
    ghr.gh_release_edit(REPO_NAME, "0.1.0", **dst)

    assert check_releases([{"tag_name": "0.1.0",
                            "draft": dst["draft"],
                            "prerelease": dst["prerelease"]}])
def test_edit_name_and_body(release_type):
    """gh_release_edit() should update a release's name and body."""
    clear_github_release_and_tags()

    flags = {
        'draft': {"draft": True, "prerelease": False},
        'prerelease': {"draft": False, "prerelease": True},
        'release': {"draft": False, "prerelease": False},
    }[release_type]

    # Only a plain release (neither draft nor prerelease) is published.
    publish = not (flags["prerelease"] or flags["draft"])
    ghr.gh_release_create(REPO_NAME, "0.1.0",
                          prerelease=flags["prerelease"],
                          publish=publish)

    assert check_releases([{"tag_name": "0.1.0",
                            "draft": flags["draft"],
                            "prerelease": flags["prerelease"]}])

    # Rewrite title and description.
    ghr.gh_release_edit(REPO_NAME, "0.1.0",
                        name="name-edited", body="body-edited")

    assert check_releases([{"tag_name": "0.1.0",
                            "draft": flags["draft"],
                            "prerelease": flags["prerelease"],
                            "name": "name-edited",
                            "body": "body-edited"}])
def test_edit_target_commitish(gh_src_dir, release_type):
    """Re-targeting a release should move its tag to the given commit."""
    params = {
        'draft': {"draft": True, "prerelease": False},
        'prerelease': {"draft": False, "prerelease": True},
        'release': {"draft": False, "prerelease": False},
    }[release_type]

    def _sync():
        # Refresh local refs so check_releases sees the remote tag state.
        run("git fetch origin")
        run("git fetch origin --tags")

    def _expected(tag_date):
        return [{"tag_name": "0.1.0",
                 "draft": params["draft"],
                 "prerelease": params["prerelease"],
                 "tag_date": tag_date}]

    with push_dir(gh_src_dir):
        sha = do_commit()  # 2017-01-02
        do_commit(push=True)  # 2017-01-03

        # Create the release; it starts out on the pushed commit.
        ghr.gh_release_create(
            REPO_NAME, "0.1.0",
            prerelease=params["prerelease"],
            publish=not (params["prerelease"] or params["draft"]))

        _sync()
        assert check_releases(_expected("20170103"))

        # Move the release back to the first commit.
        ghr.gh_release_edit(REPO_NAME, "0.1.0", target_commitish=sha)

        _sync()
        assert check_releases(_expected("20170102"))
def test_edit_release_type(from_release_type, to_release_type):
    """Editing draft/prerelease flags converts between release types."""
    clear_github_release_and_tags()

    type_flags = {
        'draft': {"draft": True, "prerelease": False},
        'prerelease': {"draft": False, "prerelease": True},
        'release': {"draft": False, "prerelease": False},
    }

    if from_release_type == to_release_type:
        pytest.skip("from_release_type is identical to "
                    "to_release_type: %s" % to_release_type)

    start = type_flags[from_release_type]
    target = type_flags[to_release_type]

    def _entry(flags):
        # Expected record for check_releases given a flag set.
        return {"tag_name": "0.1.0",
                "draft": flags["draft"],
                "prerelease": flags["prerelease"]}

    # Create release in its initial state.
    ghr.gh_release_create(REPO_NAME, "0.1.0",
                          prerelease=start["prerelease"],
                          publish=not start["prerelease"]
                          and not start["draft"])
    assert check_releases([_entry(start)])

    # Convert to the target type.
    ghr.gh_release_edit(REPO_NAME, "0.1.0", **target)
    assert check_releases([_entry(target)])
Esempio n. 9
0
    def make_ppa(self, sdist_match, tmpdir, tag):
        """Build, sign and upload a source package to a launchpad.net PPA.

        Starting from the sdist tarball matched by *sdist_match* (found in
        *tmpdir*), this extracts the sources, derives the Debian upstream
        version, writes a per-series debian/changelog, builds a signed
        source package with ``debuild -S`` and uploads it with ``dput`` —
        once for every Ubuntu series listed in PPA_SERIES.

        :param sdist_match: regex match; group(0) is the sdist file name,
            group(1) the release version
        :param tmpdir: directory containing the downloaded sdist
        :param tag: GitHub release tag whose name/body are updated from the
            extracted RELEASE-NOTES (skipped on dry runs)
        """
        repo = self.repo

        with ChdirTemporaryDirectory() as ppa_tmpdir:
            sdist_name = sdist_match.group(0)
            version = sdist_match.group(1)
            ppa_upstr_version = pep440_to_deb(version)
            ppa_upstream_suffix = self.ppa_upstream_suffix
            if ppa_upstream_suffix:
                ppa_upstr_version += ('+%s' % ppa_upstream_suffix)
            ppa_orig_name = PPA_ORIG_NAME_TEMPLATE.format(
                version=ppa_upstr_version)
            # Ubuntu series names, ordered by their numeric release version.
            series = list(map(lambda x: x[0],
                sorted(PPA_SERIES.items(), key=lambda x: x[1])))
            sdist_dir = SDIST_DIR_TEMPLATE.format(version=version)
            sdist_dir = os.path.join(ppa_tmpdir, sdist_dir)
            debian_dir = os.path.join(sdist_dir, 'debian')
            changelog_name = os.path.join(debian_dir, 'changelog')
            relnotes_name = os.path.join(sdist_dir, 'RELEASE-NOTES')

            print('Found sdist: %s, version: %s' % (sdist_name, version))
            print('  Copying sdist to %s, extracting' % ppa_orig_name)
            shutil.copy(os.path.join(tmpdir, sdist_name),
                  os.path.join(ppa_tmpdir, ppa_orig_name))
            check_call(['tar', '-xzvf', ppa_orig_name], stdout=FNULL)

            # Extract the latest changes from RELEASE-NOTES and re-indent
            # them to the two-space form debian/changelog expects.
            with open(relnotes_name, 'r') as rnfd:
                changes = rnfd.read()
                changes_match = REL_NOTES_PATTERN.match(changes)
                if changes_match and len(changes_match.group(1)) > 0:
                    changes = changes_match.group(1).split('\n')
                    for i in range(len(changes)):
                        if changes[i] == '':
                            continue
                        elif changes[i][0] != ' ':
                            changes[i] = '  %s' % changes[i]
                        elif len(changes[i]) > 1 and changes[i][1] != ' ':
                            changes[i] = ' %s' % changes[i]
                    changes = '\n'.join(changes)
                else:
                    # No parsable notes: fall back to a generic entry.
                    changes = '\n  * Porting to ppa\n\n'

            if not self.dry_run:
                # Keep the GitHub release metadata in sync with the notes.
                gh_release_edit(repo, tag, name=version)
                gh_release_edit(repo, tag, body=changes)

            os.chdir(sdist_dir)
            print('  Making PPAs for series: %s' % (', '.join(series)))
            now_formatted = strftime('%a, %d %b %Y %H:%M:%S %z', localtime())
            for s in series:
                # Next free ppa revision number for this version and series.
                ppa_num = get_next_ppa_num(self.ppa, PPA_SOURCE_NAME,
                                           ppa_upstr_version, s)
                rel_version = PPA_SERIES[s]
                ppa_version = '%s-0ppa%s~ubuntu%s' % (ppa_upstr_version,
                                                      ppa_num, rel_version)
                # Only the _source.changes path is needed below: dput reads
                # the .dsc indirectly via the changes file.  (The original
                # code also computed the .dsc path but never used it.)
                ppa_chgs = os.path.join(ppa_tmpdir, PPA_FILES_TEMPLATE.format(
                    ppa_version, '_source.changes'))
                changelog = CHANGELOG_TEMPLATE.format(ppa_version=ppa_version,
                                                      series=s,
                                                      changes=changes,
                                                      uid=self.uid,
                                                      time=now_formatted)

                with open(changelog_name, 'w') as chlfd:
                    chlfd.write(changelog)

                print('  Make %s ppa, Signing with key: %s, %s' %
                    (ppa_version, self.keyid, self.uid))
                if self.verbose:
                    check_call(['debuild', '-S'])
                else:
                    check_call(['debuild', '-S'], stdout=FNULL)
                print('  Upload %s ppa to %s' % (ppa_version, self.ppa))
                if self.dry_run:
                    print('  Dry run:  dput ppa:%s %s' % (self.ppa, ppa_chgs))
                else:
                    check_call(['dput', ('ppa:%s' % self.ppa), ppa_chgs],
                         stdout=FNULL)
                print('\n')
Esempio n. 10
0
def upload(repo_name, root_dir, incoming_dir, hashalgo, github_token=None):
    """Upload incoming files, associating them with the *hashalgo* release.

    Each file in *incoming_dir* is hashed with *hashalgo*, copied under
    ``root_dir/<hashalgo>/<checksum>`` and uploaded as an asset of the
    GitHub release named after the hash algorithm, together with
    regenerated ``<hashalgo>.csv`` / ``<hashalgo>.md`` index files.

    :param repo_name: GitHub ``owner/repo`` string
    :param root_dir: local root holding one sub-directory per hash algorithm
    :param incoming_dir: directory with new files to publish
    :param hashalgo: hash algorithm name (also used as the release tag)
    :param github_token: optional token forwarded to the github_release CLI
    :raises ValueError: unknown *hashalgo* or missing *incoming_dir*
    """

    if github_token:
        github_release._github_token_cli_arg = github_token

    hashcmd = get_hashcmd(hashalgo)
    if not hashcmd:
        raise ValueError('hashalgo "' + hashalgo + '" not found')

    if not os.path.isdir(incoming_dir):
        raise ValueError("Missing " + incoming_dir + " directory")

    hashalgo_dir = os.path.join(root_dir, hashalgo)
    if not os.path.isdir(hashalgo_dir):
        os.mkdir(hashalgo_dir)

    # Get current fileindex; a download failure means this is a new release.
    try:
        hashalgo_csv = download_fileindex_csv(repo_name, hashalgo_dir,
                                              hashalgo, github_token)
        fileindex = read_fileindex_csv(hashalgo_csv)
    except ValueError:
        # New release
        hashalgo_csv = os.path.join(hashalgo_dir, hashalgo + ".csv")
        fileindex = []

    # Get list of successfully uploaded assets (to avoid uploading them again)
    # and delete partially uploaded ones.  Assets are named by checksum.
    uploaded_assets = (github_release.get_assets(repo_name, hashalgo)
                       if fileindex else [])
    uploaded_hashes = []
    for asset in uploaded_assets:
        if asset["state"] == "uploaded":
            uploaded_hashes.append(asset["name"])
        else:
            # Remove asset partially uploaded
            github_release.gh_asset_delete(repo_name, hashalgo, asset["name"])

    # Add incoming files to fileindex and hashalgo_dir (hidden files skipped).
    filenames = [
        f for f in os.listdir(incoming_dir) if
        os.path.isfile(os.path.join(incoming_dir, f)) and not f.startswith(".")
    ]
    for filename in filenames:
        filepath = os.path.join(incoming_dir, filename)
        checksum = hashcmd(filepath)
        try:
            fileindex.index([checksum, filename])
        except ValueError:
            # new item
            fileindex.append([checksum, filename])
        # Make sure the hash-named file is present
        hashfilepath = os.path.join(hashalgo_dir, checksum)
        if not os.path.isfile(hashfilepath):
            copyfile(filepath, hashfilepath)

    # Create new hashalgo.csv/.md from existing and incoming files, sorted
    # case-insensitively by file name, then checksum.
    fileindex.sort(key=lambda a: (a[1].casefold(), a[0]))
    write_fileindex_csv(hashalgo_csv, fileindex)
    # Fix: hashalgo_dir already contains root_dir; the previous
    # os.path.join(root_dir, hashalgo_dir, ...) duplicated root_dir
    # whenever root_dir was a relative path.
    hashalgo_md = os.path.join(hashalgo_dir, hashalgo + ".md")
    write_fileindex_md(hashalgo_md, fileindex, repo_name, hashalgo)

    # Create hashalgo release (in case it does not exist)
    github_release.gh_release_create(repo_name, hashalgo, publish=True)

    # Delete old hashalgo.csv and hashalgo.md
    github_release.gh_asset_delete(repo_name, hashalgo, hashalgo + ".csv")
    github_release.gh_asset_delete(repo_name, hashalgo, hashalgo + ".md")

    # Upload new hashalgo.csv and hashalgo.md
    github_release.gh_asset_upload(repo_name, hashalgo, hashalgo_csv)
    github_release.gh_asset_upload(repo_name, hashalgo, hashalgo_md)

    # Upload new data files (skipping those already uploaded)
    for [checksum, _filename] in fileindex:
        if checksum in uploaded_hashes:
            # already uploaded
            continue
        filepath = os.path.join(hashalgo_dir, checksum)
        github_release.gh_asset_upload(repo_name, hashalgo, filepath)

    # Copy md file content into release notes
    with open(hashalgo_md, "r") as md_file:
        release_notes = md_file.read()
    github_release.gh_release_edit(repo_name, hashalgo, body=release_notes)
Esempio n. 11
0
def _upload_prerelease(args):
    """Create or refresh the prerelease and synchronize its assets."""
    # Derive a default title when none was supplied.
    rel_name = args.prerelease_name
    if rel_name is None:
        rel_name = "%s (updated on %s)" % (args.prerelease_tag.title(),
                                           get_current_date())

    # Create the release (published, flagged as prerelease).
    gh_release_create(args.repo_name, args.prerelease_tag,
                      name=rel_name, publish=True, prerelease=True)

    packages = _collect_packages(args.prerelease_packages)

    if args.re_upload:
        # Delete remote copies so the selected packages are re-uploaded.
        _delete_matching_packages(args.repo_name, args.prerelease_tag,
                                  packages)
    else:
        # Otherwise skip packages already present remotely.
        already_there = _get_matching_packages(args.repo_name,
                                               args.prerelease_tag,
                                               packages)
        for pkg in already_there:
            if pkg in packages:
                print("skipping %s package "
                      "(already uploaded)" % pkg)
                packages.remove(pkg)
        if already_there:
            print("")

    # Upload the remaining packages.
    gh_asset_upload(args.repo_name, args.prerelease_tag, packages,
                    args.dry_run)

    # Prune assets matching the clear pattern (minus the keep pattern).
    if args.prerelease_packages_clear_pattern is not None:
        gh_asset_delete(args.repo_name,
                        args.prerelease_tag,
                        args.prerelease_packages_clear_pattern,
                        keep_pattern=args.prerelease_packages_keep_pattern,
                        dry_run=args.dry_run)

    # If needed, retarget the release at a specific commit.
    sha = args.prerelease_sha
    if sha is not None:
        # A branch name resolves to its head commit.
        head_refs = get_refs(args.repo_name, pattern="refs/heads/%s" % sha)
        if head_refs:
            assert len(head_refs) == 1
            branch = sha
            sha = head_refs[0]["object"]["sha"]
            print("resolved '%s' to '%s'" % (branch, sha))
        # Check that sha exists
        if gh_commit_get(args.repo_name, sha) is None:
            raise ValueError(
                "Failed to get commit associated with --prerelease-sha: %s" %
                sha)

        gh_release_edit(args.repo_name, args.prerelease_tag,
                        target_commitish=sha)

    # Flip to draft and back so the release date is refreshed to "now".
    gh_release_edit(args.repo_name, args.prerelease_tag, draft=True)
    gh_release_edit(args.repo_name, args.prerelease_tag,
                    name=rel_name, draft=False, prerelease=True)

    _cancel_additional_appveyor_builds(args.prerelease_tag)

    return True
Esempio n. 12
0
    def make_ppa(self, sdist_match, tmpdir, tag):
        """Build, sign and upload dsc to launchpad.net ppa from sdist.tar.gz

        Extracts the sdist matched by *sdist_match* (located in *tmpdir*),
        derives the Debian upstream version, then builds and uploads one
        signed source package per Ubuntu series in PPA_SERIES.  Unless
        this is a dry run, the GitHub release *tag* also gets its name and
        body updated from the extracted RELEASE-NOTES.
        """
        repo = self.repo

        with ChdirTemporaryDirectory() as ppa_tmpdir:
            # group(0) is the sdist file name, group(1) the release version.
            sdist_name = sdist_match.group(0)
            version = sdist_match.group(1)
            ppa_upstr_version = pep440_to_deb(version)
            ppa_upstream_suffix = self.ppa_upstream_suffix
            if ppa_upstream_suffix:
                ppa_upstr_version += ('+%s' % ppa_upstream_suffix)
            ppa_orig_name = PPA_ORIG_NAME_TEMPLATE.format(
                version=ppa_upstr_version)
            # Series names sorted by their numeric Ubuntu release version.
            series = list(map(lambda x: x[0],
                sorted(PPA_SERIES.items(), key=lambda x: x[1])))
            sdist_dir = SDIST_DIR_TEMPLATE.format(version=version)
            sdist_dir = os.path.join(ppa_tmpdir, sdist_dir)
            debian_dir = os.path.join(sdist_dir, 'debian')
            changelog_name = os.path.join(debian_dir, 'changelog')
            relnotes_name = os.path.join(sdist_dir, 'RELEASE-NOTES')

            print('Found sdist: %s, version: %s' % (sdist_name, version))
            print('  Copying sdist to %s, extracting' % ppa_orig_name)
            shutil.copy(os.path.join(tmpdir, sdist_name),
                  os.path.join(ppa_tmpdir, ppa_orig_name))
            check_call(['tar', '-xzvf', ppa_orig_name], stdout=FNULL)

            # Pull the latest change section from RELEASE-NOTES and coerce
            # every line to the two-space indent debian/changelog requires.
            with open(relnotes_name, 'r') as rnfd:
                changes = rnfd.read()
                changes_match = REL_NOTES_PATTERN.match(changes)
                if changes_match and len(changes_match.group(1)) > 0:
                    changes = changes_match.group(1).split('\n')
                    for i in range(len(changes)):
                        if changes[i] == '':
                            continue
                        elif changes[i][0] != ' ':
                            changes[i] = '  %s' % changes[i]
                        elif len(changes[i]) > 1 and changes[i][1] != ' ':
                            changes[i] = ' %s' % changes[i]
                    changes = '\n'.join(changes)
                else:
                    # No parsable notes: use a generic changelog entry.
                    changes = '\n  * Porting to ppa\n\n'

            if not self.dry_run:
                # Keep the GitHub release metadata in sync with the notes.
                gh_release_edit(repo, tag, name=version)
                gh_release_edit(repo, tag, body=changes)

            os.chdir(sdist_dir)
            print('  Making PPAs for series: %s' % (', '.join(series)))
            now_formatted = strftime('%a, %d %b %Y %H:%M:%S %z', localtime())
            for s in series:
                # Next free ppa revision number for this version and series.
                ppa_num = get_next_ppa_num(self.ppa, PPA_SOURCE_NAME,
                                           ppa_upstr_version, s)
                rel_version = PPA_SERIES[s]
                ppa_version = '%s-0ppa%s~ubuntu%s' % (ppa_upstr_version,
                                                      ppa_num, rel_version)
                # NOTE(review): ppa_dsc is computed but never used below.
                ppa_dsc = os.path.join(ppa_tmpdir, PPA_FILES_TEMPLATE.format(
                    ppa_version, '.dsc'))
                ppa_chgs = os.path.join(ppa_tmpdir, PPA_FILES_TEMPLATE.format(
                    ppa_version, '_source.changes'))
                changelog = CHANGELOG_TEMPLATE.format(ppa_version=ppa_version,
                                                      series=s,
                                                      changes=changes,
                                                      uid=self.uid,
                                                      time=now_formatted)

                with open(changelog_name, 'w') as chlfd:
                    chlfd.write(changelog)

                print('  Make %s ppa, Signing with key: %s, %s' %
                    (ppa_version, self.keyid, self.uid))
                if self.verbose:
                    check_call(['debuild', '-S'])
                else:
                    # Quiet mode: route debuild output to /dev/null.
                    check_call(['debuild', '-S'], stdout=FNULL)
                print('  Upload %s ppa to %s' % (ppa_version, self.ppa))
                if self.dry_run:
                    print('  Dry run:  dput ppa:%s %s' % (self.ppa, ppa_chgs))
                else:
                    check_call(['dput', ('ppa:%s' % self.ppa), ppa_chgs],
                         stdout=FNULL)
                print('\n')