Example No. 1
def main(argv):
    pars = get_parser()

    opts = argparse.Namespace()
    pars_config(opts, 'setup.cfg')

    opts = pars.parse_args(argv, namespace=opts)
    logger.info(f'Final namespace: {opts}')

    tag = os.getenv(opts.tagvar, '')
    if not tag:
        logger.error(f'No tag found for {opts.tagvar}')
        return 1

    logger.info(f'envar {opts.tagvar} resolved to {tag}')
    nicetag = tag
    pkgname = None
    if '@' in tag:
        logger.info('@ delimited tag, splitting into name and tag part')
        pkgname, nicetag = tag.split('@')
        logger.info(f'Got pkgname={pkgname} @ nicetag={nicetag}')
    m = re.search(SEMVER_RE, nicetag)
    if not m:
        logger.error(f'nicetag={nicetag} does not match semver regex')
        return 1
    is_prerelease = False
    if m.groupdict().get('pre'):
        is_prerelease = True

    defvalu = ''
    if not opts.dryrun:
        envd = os.getenv('VTX_COMMON_DRYRUN', None)
        if envd is not None:
            logger.info(
                'Setting opts.dryrun to True from environment variable')
            opts.dryrun = True
    if opts.dryrun:
        defvalu = 'DRYRUN'

    gh_token = os.getenv(opts.gittokenvar, defvalu)
    if not gh_token:
        logger.error('No github token found')
        return 1
    gh_username = os.getenv(opts.gituservar, defvalu)
    if not gh_username:
        logger.error('No github user found')
        return 1
    gh_repo = os.getenv(opts.gitrepovar, defvalu)
    if not gh_repo:
        logger.error('No github repo found')
        return 1

    extra_parts = []

    pfile = opts.storm_pkg_file
    mtyp = opts.storm_pkg_type
    if pfile and mtyp:

        if opts.storm_pkg_file_pkgname:
            assert pkgname is not None
            logger.info('Injecting pkgname to pkg path')
            pfile = pfile.format(pkgname=pkgname)

        if opts.storm_pkg_type_pkgname:
            assert pkgname is not None
            logger.info('Injecting pkgname to pkg mtype')
            mtyp = mtyp.format(pkgname=pkgname)

        logger.info(f'Getting storm package from {pfile}')
        pkg = v_gpsm.yamlload(pfile)
        minv_message = v_gpsm.getMessageFromPkg(pkg, mtyp)
        logger.info(f'Got message: {minv_message}')
        extra_parts.append(minv_message)

    extra_lines = opts.extra_lines
    if extra_lines:
        logger.info(f'Extra lines found: {extra_lines}')
        extra_parts.append(extra_lines)

    # Join extra lines together
    extra_lines = '\n'.join(extra_parts)

    changelog_fp = opts.changelog
    if opts.changelog_pkgname:
        assert pkgname is not None
        logger.info('Injecting pkgname to changelog path')
        changelog_fp = changelog_fp.format(pkgname=pkgname)

    assert os.path.isfile(changelog_fp)
    with open(changelog_fp, 'rb') as fd:
        raw_changelog = fd.read().decode()
    parsed_logs = parse_changelog(raw_changelog)

    target_log = parsed_logs.get(nicetag)
    if not target_log:
        logger.error(f'Unable to find logs for tag [{nicetag}]')
        # It's possible for pre-release tags to end up without a changelog.
        # This condition should not end up failing a CI pipeline.
        return 0
    logger.info(f'Found changelogs for [{nicetag}] in [{changelog_fp}]')

    if opts.remove_urls:
        logger.info('Removing URLs')
        target_log = remove_urls(target_log)

    # join logs together and strip them
    target_log = '\n'.join(target_log)
    target_log = target_log.strip()

    if extra_lines:
        logger.info('Appending extra line data')
        target_log = '\n'.join([target_log, '', extra_lines])
        # remove trailing data if present in extra_lines
        target_log = target_log.strip()

    logger.info('Final Log:')
    for line in target_log.split('\n'):
        logger.debug(line)

    name = nicetag
    if opts.release_name:
        name = f'{opts.release_name} {nicetag}'
    if opts.release_name_pkgname:
        logger.info(f'Injecting pkgname into [{name}]')
        name = name.format(pkgname=pkgname)
        logger.info(f'Name is now [{name}]')

    logger.info(f'Release Name: [{name}]')
    gh_repo_path = f'{gh_username}/{gh_repo}'

    if opts.dryrun:
        logger.info(
            'Dry-run mode enabled. Not performing a Github release action.')
        logger.info('Would have made release with the following information:')
        logger.info(f'gh_repo_path={gh_repo_path}')
        logger.info(f'tag={tag}')
        logger.info(f'name={name}')
        logger.info(f'prerelease={is_prerelease}')
        logger.info(f'message={target_log}')
        return 0

    gh = github.Github(gh_token)

    logger.info(f'Getting github repo for {gh_repo_path}')
    repo = gh.get_repo(gh_repo_path)

    logger.info('Making github release')
    release = repo.create_git_release(
        tag=tag,
        name=name,
        draft=False,
        message=target_log,
        prerelease=is_prerelease,
    )
    logger.info(f'Made github release {release}')

    return 0
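SEMVER_RE and parse_changelog are defined elsewhere in this project. The prerelease check above only relies on the regex exposing a named 'pre' group, so a minimal compatible sketch (hypothetical, not the project's actual pattern) could be:

import re

SEMVER_RE = re.compile(
    r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'
    r'(?:-(?P<pre>[0-9A-Za-z.-]+))?')

# '1.2.3'      -> pre is None, so is_prerelease stays False
# '1.2.3-rc.1' -> pre == 'rc.1', so is_prerelease becomes True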
Example No. 2
def pr_detailed_comment(org_name, repo_name, pr_owner, pr_repo, pr_branch,
                        pr_num, comment):
    is_staged_recipes = (repo_name == "staged-extensions")
    if not (repo_name.endswith("-feedstock") or is_staged_recipes):
        return

    if not is_staged_recipes and UPDATE_CIRCLECI_KEY_MSG.search(comment):
        update_circle(org_name, repo_name)

        gh = github.Github(os.environ['GH_TOKEN'])
        repo = gh.get_repo("{}/{}".format(org_name, repo_name))
        pull = repo.get_pull(int(pr_num))
        message = textwrap.dedent("""
                Hi! This is the friendly automated nwb-extensions-webservice.

                I just wanted to let you know that I updated the circle-ci deploy key and followed the project.
                """)
        pull.create_issue_comment(message)

    pr_commands = [LINT_MSG]
    if not is_staged_recipes:
        pr_commands += [RERENDER_MSG]

    if not any(command.search(comment) for command in pr_commands):
        return

    with tmp_directory() as tmp_dir:
        print(tmp_dir, repo_name)
        feedstock_dir = os.path.join(tmp_dir, repo_name)
        repo_url = "https://{}@github.com/{}/{}.git".format(
            os.environ['GH_TOKEN'], pr_owner, pr_repo)
        repo = Repo.clone_from(repo_url, feedstock_dir, branch=pr_branch)

        if LINT_MSG.search(comment):
            relint(org_name, repo_name, pr_num)

        changed_anything = False
        rerender_error = False
        expected_changes = []
        if not is_staged_recipes:
            do_rerender = False
            if RERENDER_MSG.search(comment):
                do_rerender = True
                expected_changes.append('re-render')

            if do_rerender:
                try:
                    changed_anything |= rerender(repo)
                except RuntimeError:
                    rerender_error = True

        if expected_changes:
            if len(expected_changes) > 1:
                expected_changes[-1] = 'and ' + expected_changes[-1]
            joiner = ", " if len(expected_changes) > 2 else " "
            changes_str = joiner.join(expected_changes)

            gh = github.Github(os.environ['GH_TOKEN'])
            gh_repo = gh.get_repo("{}/{}".format(org_name, repo_name))
            pull = gh_repo.get_pull(int(pr_num))

            if changed_anything:
                try:
                    repo.remotes.origin.push()
                except GitCommandError:
                    message = textwrap.dedent("""
                        Hi! This is the friendly automated nwb-extensions-webservice.

                        I tried to {} for you, but it looks like I wasn't able to push to the {} branch of {}/{}. Did you check the "Allow edits from maintainers" box?
                        """).format(pr_branch, pr_owner, pr_repo,
                                    changes_str)  # noqa: E501
                    pull.create_issue_comment(message)
            else:
                if rerender_error:
                    message = textwrap.dedent("""
                        Hi! This is the friendly automated nwb-extensions-webservice.

                        I tried to {} for you but ran into some issues, please ping nwb-extensions/core for further assistance.
                        """).format(changes_str)  # noqa: E501
                else:
                    message = textwrap.dedent("""
                        Hi! This is the friendly automated nwb-extensions-webservice.

                        I tried to {} for you, but it looks like there was nothing to do.
                        """).format(changes_str)
                pull.create_issue_comment(message)
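tmp_directory() is a helper defined elsewhere in the webservice; a plausible sketch, assuming it is simply a context manager yielding a scratch directory:

import tempfile
from contextlib import contextmanager

@contextmanager
def tmp_directory():
    # hypothetical: yield a throwaway working directory, cleaned up on exit
    with tempfile.TemporaryDirectory() as tmp_dir:
        yield tmp_dir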
Example No. 3
def pr_detailed_comment(org_name, repo_name, pr_owner, pr_repo, pr_branch, pr_num, comment):
    is_staged_recipes = (repo_name == "staged-recipes")
    if not (repo_name.endswith("-feedstock") or is_staged_recipes):
        return

    if not is_staged_recipes and UPDATE_CIRCLECI_KEY_MSG.search(comment):
        update_circle(org_name, repo_name)

        gh = github.Github(os.environ['GH_TOKEN'])
        repo = gh.get_repo("{}/{}".format(org_name, repo_name))
        pull = repo.get_pull(int(pr_num))
        message = textwrap.dedent("""
                Hi! This is the friendly automated conda-forge-webservice.

                I just wanted to let you know that I updated the circle-ci deploy key and followed the project.
                """)
        pull.create_issue_comment(message)

    pr_commands = [LINT_MSG]
    if not is_staged_recipes:
        pr_commands += [ADD_NOARCH_MSG, RERENDER_MSG, UPDATE_CB3_MSG]

    if not any(command.search(comment) for command in pr_commands):
        return

    with tmp_directory() as tmp_dir:
        feedstock_dir = os.path.join(tmp_dir, repo_name)
        repo_url = "https://{}@github.com/{}/{}.git".format(
            os.environ['GH_TOKEN'], pr_owner, pr_repo)
        repo = Repo.clone_from(repo_url, feedstock_dir, branch=pr_branch)

        if LINT_MSG.search(comment):
            relint(org_name, repo_name, pr_num)

        changed_anything = False
        expected_changes = []
        extra_msg = ''
        if not is_staged_recipes:
            do_noarch = do_cb3 = do_rerender = False
            if ADD_NOARCH_MSG.search(comment):
                do_noarch = do_rerender = True
                expected_changes.append('add noarch')
            if UPDATE_CB3_MSG.search(comment):
                do_cb3 = do_rerender = True
                expected_changes.append('update for conda-build 3')
            if RERENDER_MSG.search(comment):
                do_rerender = True
                expected_changes.append('re-render')

            if do_noarch:
                changed_anything |= make_noarch(repo)

            if do_cb3:
                c, cb3_changes = update_cb3(repo)
                changed_anything |= c
                extra_msg += '\n\n' + cb3_changes

            if do_rerender:
                changed_anything |= rerender(repo)

        if expected_changes:
            if len(expected_changes) > 1:
                expected_changes[-1] = 'and ' + expected_changes[-1]
            joiner = ", " if len(expected_changes) > 2 else " "
            changes_str = joiner.join(expected_changes)

            gh = github.Github(os.environ['GH_TOKEN'])
            gh_repo = gh.get_repo("{}/{}".format(org_name, repo_name))
            pull = gh_repo.get_pull(int(pr_num))

            if changed_anything:
                try:
                    repo.remotes.origin.push()
                except GitCommandError:
                    message = textwrap.dedent("""
                        Hi! This is the friendly automated conda-forge-webservice.

                        I tried to {} for you, but it looks like I wasn't able to push to the {} branch of {}/{}. Did you check the "Allow edits from maintainers" box?
                        """).format(pr_branch, pr_owner, pr_repo, changes_str)
                    pull.create_issue_comment(message)
            else:
                message = textwrap.dedent("""
                    Hi! This is the friendly automated conda-forge-webservice.

                    I tried to {} for you, but it looks like there was nothing to do.
                    """).format(changes_str)
                pull.create_issue_comment(message)
Example No. 4
parser.add_argument(
    '--source-ref',
    required=True,
    help='The git version for the source. Used to retry failed imports '
         'without bumping versions.')
parser.add_argument('--dest',
                    required=True,
                    help='The destination rosdistro name')
parser.add_argument('--release-org',
                    required=True,
                    help='The organization containing release repositories')

args = parser.parse_args()

gclient = github.Github(os.environ['GITHUB_TOKEN'])
release_org = gclient.get_organization(args.release_org)
org_release_repos = [r.name for r in release_org.get_repos() if r.name]

if not os.path.isfile('index-v4.yaml'):
    raise RuntimeError(
        'This script must be run from a rosdistro index directory.')
rosdistro_dir = os.path.abspath(os.getcwd())
rosdistro_index_url = f'file://{rosdistro_dir}/index-v4.yaml'

index = get_index(rosdistro_index_url)
with open('index-v4.yaml', 'r') as f:
    index_yaml = yaml.safe_load(f)

if len(index_yaml['distributions'][args.source]['distribution']) != 1 or \
        len(index_yaml['distributions'][args.dest]['distribution']) != 1:
    raise RuntimeError(
Example No. 5
def testGetOAuthScopesFromHeader(self):
    g = github.Github(self.oauth_token)
    self.assertEqual(g.oauth_scopes, None)
    g.get_user().name
    self.assertEqual(g.oauth_scopes, ['repo', 'user', 'gist'])
Example No. 6
def api_request_loop(state):
    g = github.Github(ACCESS_TOKEN)

    print("Collecting Github Search API data...")
    try:
        repositories = g.search_code(state.query)

        state.totalCount = repositories.totalCount

        # Hack: backfill the PaginatedList with placeholders so that a restored
        # session does not refetch (and get rate-limited on) pages it has
        # already seen; the library fetches results in pages of 30.
        repositories.__dict__["_PaginatedListBase__elements"] = [None] * (
            state.index // 30) * 30
        state.lastInitIndex = state.index

        print(bcolors.CLEAR)

        i = state.index
        stepBack = False
        while i < state.totalCount:
            while True:
                try:
                    state.index = i

                    # Manually fill Paginator to avoid ratelimiting on restore
                    repositories = pagination_hack(repositories, state)

                    repo = repositories[i]

                    log_buf = "https://github.com/" + \
                        bcolors.OKGREEN + repo.repository.owner.login + "/" + \
                        bcolors.OKBLUE + repo.repository.name + "/blob" + \
                        bcolors.ENDC + \
                        os.path.dirname(repo.html_url.split('blob')[1]) + \
                        "/" + bcolors.WARNING + repo.name + bcolors.ENDC
                    print(log_buf)
                    log_buf = "\n" + log_buf + "\n"

                    if should_parse(repo, state) or stepBack:
                        stepBack = False
                        log_buf += regex_search(state.checks, repo)
                        ui_loop(repo, repositories, log_buf, state)
                        if state.index < i:
                            i = state.index
                            stepBack = True
                        print(bcolors.CLEAR)
                    else:
                        print("Skipping...")
                    i += 1
                    break
                except github.RateLimitExceededException:
                    try:
                        print(bcolors.FAIL + "RateLimitException: "
                              "Please wait about 30 seconds before you "
                              "try again, or exit (CTRL-C).\n " + bcolors.ENDC)
                        save_state("ratelimited", repositories, state)
                        input("Press enter to try again...")
                    except KeyboardInterrupt:
                        sys.exit(1)
    except github.RateLimitExceededException:
        print(bcolors.FAIL + "RateLimitException: "
              "Please wait about 30 seconds before you try again.\n" +
              bcolors.ENDC)
        save_state("ratelimited", repositories, state)
        sys.exit(-1)
Example No. 7
# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'Parselmouth'
copyright = '2017-2020, Yannick Jadoul'
author = 'Yannick Jadoul'

if on_rtd:
    rtd_version = os.environ.get('READTHEDOCS_VERSION')
    branch = 'master' if rtd_version == 'latest' else rtd_version

    github_token = os.environ['GITHUB_TOKEN']
    head_sha = git.Repo(search_parent_directories=True).head.commit.hexsha
    g = github.Github()
    runs = g.get_repo('YannickJadoul/Parselmouth').get_workflow(
        "wheels.yml").get_runs(branch=branch)
    artifacts_url = next(r for r in runs
                         if r.head_sha == head_sha).artifacts_url

    archive_download_url = next(
        artifact
        for artifact in requests.get(artifacts_url).json()['artifacts']
        if artifact['name'] == 'rtd-wheel')['archive_download_url']
    artifact_bin = io.BytesIO(
        requests.get(archive_download_url,
                     headers={
                         'Authorization': f'token {github_token}'
                     },
                     stream=True).content)
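GitHub workflow artifacts are delivered as zip archives, so artifact_bin still needs unpacking before the wheel can be used; a sketch of a plausible continuation (the destination directory is an assumption):

import zipfile

with zipfile.ZipFile(artifact_bin) as zf:
    zf.extractall('rtd-wheel')  # hypothetical destination directory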
Example No. 8
def _database():
    gh = github.Github(flask.current_app.config['GITHUB_TOKEN'])
    return githubdb.GithubDB(gh)
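A usage sketch for the factory above, assuming the token is placed into the Flask config at startup:

import os
import flask

app = flask.Flask(__name__)
app.config['GITHUB_TOKEN'] = os.environ['GITHUB_TOKEN']  # assumed env var

with app.app_context():
    db = _database()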
Example No. 9
                        merge_method))
            post_parameters["merge_method"] = merge_method
        ## End custom code ##

        headers, data = self._requester.requestJsonAndCheck(
            "PUT", self.url + "/merge", input=post_parameters)
        return github.PullRequestMergeStatus.PullRequestMergeStatus(
            self._requester, headers, data, completed=True)


github.PullRequest.PullRequest = ExtendedPullRequest

# Initialize GitHub client. For documentation,
# see http://pygithub.github.io/PyGithub/v1/reference.html.
github_access_token = os.environ['GITHUB_ACCESS_TOKEN']
edx = github.Github(github_access_token).get_organization('edx')


class Repo:
    """Utility representing a Git repo."""

    # Make 18 attempts to merge the PR (the original attempt plus 17 retries), sleeping for 5 minutes between
    # each attempt. This should result in a total of 85 minutes of sleep time. Status checks should almost
    # always complete in this period.
    MAX_MERGE_RETRIES = 17
    def __init__(self,
                 clone_url,
                 repo_owner,
                 branch_name,
                 message,
                 merge_method=DEFAULT_MERGE_METHOD):
        # See https://github.com/blog/1270-easier-builds-and-deployments-using-git-over-https-and-oauth.
Example No. 10
def get_github_instance():
    g = github.Github(settings.GITHUB_ACCESS_TOKEN)
    return g
Example No. 11
    def setUp(self):
        super(FunctionalTestBase, self).setUp()
        self.existing_labels = []
        self.pr_counter = 0
        self.git_counter = 0
        self.cassette_library_dir = os.path.join(CASSETTE_LIBRARY_DIR_BASE,
                                                 self.__class__.__name__,
                                                 self._testMethodName)

        # Recording setup
        if RECORD:
            if os.path.exists(self.cassette_library_dir):
                shutil.rmtree(self.cassette_library_dir)
            os.makedirs(self.cassette_library_dir)

        self.recorder = vcr.VCR(
            cassette_library_dir=self.cassette_library_dir,
            record_mode="all" if RECORD else "none",
            match_on=["method", "uri"],
            filter_headers=[
                ("Authorization", "<TOKEN>"),
                ("X-Hub-Signature", "<SIGNATURE>"),
                ("User-Agent", None),
                ("Accept-Encoding", None),
                ("Connection", None),
            ],
            before_record_response=self.response_filter,
            custom_patches=((pygithub.MainClass, "HTTPSConnection",
                             vcr.stubs.VCRHTTPSConnection), ),
        )

        if RECORD:
            github.CachedToken.STORAGE = {}
        else:
            # Never expire token during replay
            mock.patch.object(github_app,
                              "get_or_create_jwt",
                              return_value="<TOKEN>").start()
            mock.patch.object(
                github.GithubAppInstallationAuth,
                "get_access_token",
                return_value="<TOKEN>",
            ).start()

            # NOTE(sileht): the httpx vcrpy stubs do not replay auth_flow, as they
            # directly patch client.send(), so anything that occurs during
            # auth_flow has to be mocked during replay
            def get_auth(owner=None, auth=None):
                if auth is None:
                    auth = github.get_auth(owner)
                    auth.installation = {
                        "id": config.INSTALLATION_ID,
                    }
                    auth.permissions_need_to_be_updated = False
                    auth.owner_id = config.TESTING_ORGANIZATION_ID
                return auth

            async def github_aclient(owner=None, auth=None):
                return github.AsyncGithubInstallationClient(
                    get_auth(owner, auth))

            def github_client(owner=None, auth=None):
                return github.GithubInstallationClient(get_auth(owner, auth))

            mock.patch.object(github, "get_client", github_client).start()
            mock.patch.object(github, "aget_client", github_aclient).start()

        with open(engine.mergify_rule_path, "r") as f:
            engine.MERGIFY_RULE = yaml.safe_load(f.read().replace(
                "mergify[bot]", "mergify-test[bot]"))

        mock.patch.object(branch_updater.utils, "Gitter",
                          self.get_gitter).start()
        mock.patch.object(duplicate_pull.utils, "Gitter",
                          self.get_gitter).start()

        if not RECORD:
            # NOTE(sileht): Don't wait exponentially during replay
            mock.patch.object(context.Context._ensure_complete.retry, "wait",
                              None).start()

        # Web authentication always passes
        mock.patch("hmac.compare_digest", return_value=True).start()

        branch_prefix_path = os.path.join(self.cassette_library_dir,
                                          "branch_prefix")

        if RECORD:
            self.BRANCH_PREFIX = datetime.datetime.utcnow().strftime(
                "%Y%m%d%H%M%S")
            with open(branch_prefix_path, "w") as f:
                f.write(self.BRANCH_PREFIX)
        else:
            with open(branch_prefix_path, "r") as f:
                self.BRANCH_PREFIX = f.read()

        self.master_branch_name = self.get_full_branch_name("master")

        self.git = self.get_gitter(LOG)
        self.addCleanup(self.git.cleanup)

        self.loop = asyncio.get_event_loop()
        self.loop.run_until_complete(web.startup())
        self.app = testclient.TestClient(web.app)

        # NOTE(sileht): Prepare a fresh redis
        self.redis = utils.get_redis_for_cache()
        self.redis.flushall()
        self.subscription = subscription.Subscription(
            config.INSTALLATION_ID,
            self.SUBSCRIPTION_ACTIVE,
            "You're not nice",
            {"mergify-test1": config.ORG_ADMIN_GITHUB_APP_OAUTH_TOKEN},
            frozenset(
                getattr(subscription.Features, f)
                for f in subscription.Features.__members__)
            if self.SUBSCRIPTION_ACTIVE else frozenset(),
        )
        self.loop.run_until_complete(
            self.subscription.save_subscription_to_cache())

        # Let's start recording
        cassette = self.recorder.use_cassette("http.json")
        cassette.__enter__()
        self.addCleanup(cassette.__exit__)

        integration = pygithub.GithubIntegration(config.INTEGRATION_ID,
                                                 config.PRIVATE_KEY)
        self.installation_token = integration.get_access_token(
            config.INSTALLATION_ID).token

        base_url = config.GITHUB_API_URL
        self.g_integration = pygithub.Github(self.installation_token,
                                             base_url=base_url)
        self.g_admin = pygithub.Github(config.ORG_ADMIN_PERSONAL_TOKEN,
                                       base_url=base_url)
        self.g_fork = pygithub.Github(self.FORK_PERSONAL_TOKEN,
                                      base_url=base_url)

        self.o_admin = self.g_admin.get_organization(
            config.TESTING_ORGANIZATION)
        self.o_integration = self.g_integration.get_organization(
            config.TESTING_ORGANIZATION)
        self.u_fork = self.g_fork.get_user()
        assert self.o_admin.login == "mergifyio-testing"
        assert self.o_integration.login == "mergifyio-testing"
        assert self.u_fork.login in ["mergify-test2", "mergify-test3"]

        self.r_o_admin = self.o_admin.get_repo(self.REPO_NAME)
        self.r_o_integration = self.o_integration.get_repo(self.REPO_NAME)
        self.r_fork = self.u_fork.get_repo(self.REPO_NAME)

        self.url_main = f"{config.GITHUB_URL}/{self.r_o_integration.full_name}"
        self.url_fork = (
            f"{config.GITHUB_URL}/{self.u_fork.login}/{self.r_o_integration.name}"
        )

        self.cli_integration = github.get_client(config.TESTING_ORGANIZATION)

        real_get_subscription = subscription.Subscription.get_subscription

        async def fake_retrieve_subscription_from_db(owner_id):
            if owner_id == config.TESTING_ORGANIZATION_ID:
                return self.subscription
            return subscription.Subscription(
                owner_id,
                False,
                "We're just testing",
                {},
                set(),
            )

        async def fake_subscription(owner_id):
            if owner_id == config.TESTING_ORGANIZATION_ID:
                return await real_get_subscription(owner_id)
            return subscription.Subscription(
                owner_id,
                False,
                "We're just testing",
                {},
                set(),
            )

        mock.patch(
            "mergify_engine.subscription.Subscription._retrieve_subscription_from_db",
            side_effect=fake_retrieve_subscription_from_db,
        ).start()

        mock.patch(
            "mergify_engine.subscription.Subscription.get_subscription",
            side_effect=fake_subscription,
        ).start()

        mock.patch(
            "github.MainClass.Installation.Installation.get_repos",
            return_value=[self.r_o_integration],
        ).start()

        self._event_reader = EventReader(self.app)
        self._event_reader.drain()
Example No. 12
def __init__(self, name, access_token):
    self.github = github.Github(access_token)
    self.repo = self.github.get_repo(name)
    self.name = name
Example No. 13
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "lbry_part", help="part of lbry version to bump",
        choices=LBRY_PARTS
    )
    parser.add_argument(
        "--skip-lbryum", help="skip bumping lbryum, even if there are changes",
        action="store_true",
    )
    parser.add_argument(
        "--lbryum-part", help="part of lbryum version to bump",
        choices=LBRYUM_PARTS
    )
    parser.add_argument(
        "--last-release",
        help=("manually set the last release version. The default is to query and parse the"
              " value from the release page.")
    )
    parser.add_argument(
        "--skip-sanity-checks", action="store_true")
    parser.add_argument(
        "--require-changelog", action="store_true",
        help=("Set this flag to raise an exception if a submodules has changes without a"
              " corresponding changelog entry. The default is to log a warning")
    )
    parser.add_argument(
        "--skip-push", action="store_true",
        help="Set to not push changes to remote repo"
    )

    args = parser.parse_args()

    base = git.Repo(os.getcwd())
    branch = 'master'

    if not args.skip_sanity_checks:
        run_sanity_checks(base, branch)

    base_repo = Repo('lbry-app', args.lbry_part, os.getcwd())
    base_repo.assert_new_tag_is_absent()

    last_release = args.last_release or base_repo.get_last_tag()
    logging.info('Last release: %s', last_release)

    gh_token = get_gh_token()
    auth = github.Github(gh_token)
    github_repo = auth.get_repo('lbryio/lbry-app')

    names = ['lbryum', 'lbry']
    repos = {name: Repo(name, get_part(args, name)) for name in names}

    changelogs = {}

    for repo in repos.values():
        logging.info('Processing repo: %s', repo.name)
        repo.checkout(branch)
        last_submodule_hash = base_repo.get_submodule_hash(last_release, repo.name)
        if repo.has_changes_from_revision(last_submodule_hash):
            if repo.name == 'lbryum':
                if args.skip_lbryum:
                    continue
                if not repo.part:
                    repo.part = get_lbryum_part()
            entry = repo.get_changelog_entry()
            if entry:
                changelogs[repo.name] = entry.strip()
                repo.add_changelog()
            else:
                msg = 'Changelog entry is missing for {}'.format(repo.name)
                if args.require_changelog:
                    raise Exception(msg)
                else:
                    logging.warning(msg)
        else:
            logging.warning('Submodule %s has no changes.', repo.name)
            if repo.name == 'lbryum':
                # The other repos' versions track each other, so we need to bump
                # them even if there aren't any changes, but lbryum should only be
                # bumped if it has changes
                continue
        # bumpversion will fail if there is already the tag we want in the repo
        repo.assert_new_tag_is_absent()
        repo.bumpversion()

    release_msg = get_release_msg(changelogs, names)

    for name in names:
        base.git.add(name)

    base_repo.bumpversion()
    current_tag = base.git.describe()

    is_rc = re.match(r'\drc\d+$', current_tag) is not None

    github_repo.create_git_release(current_tag, current_tag, release_msg, draft=True,
                                   prerelease=is_rc)
    no_change_msg = ('No change since the last release. This release is simply a placeholder'
                     ' so that LBRY and LBRY App track the same version')
    lbrynet_daemon_release_msg = changelogs.get('lbry', no_change_msg)
    auth.get_repo('lbryio/lbry').create_git_release(
        current_tag, current_tag, lbrynet_daemon_release_msg, draft=True)

    if not args.skip_push:
        for repo in repos.values():
            repo.git.push(follow_tags=True)
        base.git.push(follow_tags=True, recurse_submodules='check')
    else:
        logging.info('Skipping push; you will have to reset and delete tags if '
                     'you want to run this script again. Take a look at reset.sh; '
                     'it probably does what you want.')
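Neither get_gh_token nor get_part is shown above; they live elsewhere in the script. A plausible sketch of get_part, assuming it maps each submodule to its bump-part argument:

def get_part(args, name):
    # hypothetical: 'lbryum' has its own optional flag, the rest follow
    # the positional lbry_part argument
    if name == 'lbryum':
        return args.lbryum_part
    return args.lbry_part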
Example No. 14
feedstocks_dir = os.path.expanduser(args.feedstocks_dir)
change_limit = args.limit

feedstocks.clone_all('conda-forge', feedstocks_dir)
feedstocks.fetch_feedstocks(feedstocks_dir)
regexp = re.compile(args.regexp)
randomised_feedstocks = [
    feedstock for feedstock in feedstocks.cloned_feedstocks(feedstocks_dir)
    if regexp.match(feedstock.package)
]
# Shuffle is in-place. :(
random.shuffle(randomised_feedstocks)

gh_token = conda_smithy.github.gh_token()
gh = github.Github(gh_token)

gh_me = gh.get_user()

if gh_me.login != 'conda-forge-admin':
    raise ValueError("The github token isn't that of conda-forge-admin (it's "
                     "for {}), I'm going to have to bail.".format(gh_me.login))

gh_forge = gh.get_organization('conda-forge')


def my_repos(gh_user):
    """
    List all of my repos.
    See https://github.com/PyGithub/PyGithub/issues/390 for rationale.
Example No. 15
def testBadUserAgent(self):
    self.assertRaises(
        github.BadUserAgentException, lambda: github.Github(
            self.login, self.password, user_agent="").get_user().name)
Example No. 16
def make_github_agent(user=None, password=None):
    """ Create github agent to auth """
    if not user:
        return github.Github()
    else:
        return github.Github(user, password)
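A quick usage sketch for the helper above (note that GitHub has since deprecated username/password authentication; token auth via github.Github(token) is the modern route):

anonymous = make_github_agent()            # unauthenticated, low rate limit
print(anonymous.get_user('octocat').name)  # works for public data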
Example No. 17
def main():
    """Collects necessary info and dumps it to disk."""
    branch_name = common.get_current_branch_name()
    if not common.is_current_branch_a_release_branch():
        raise Exception(
            'This script should only be run from the latest release branch.')

    parsed_args = _PARSER.parse_args()
    if parsed_args.github_username is None:
        raise Exception(
            'No GitHub username provided. Please re-run the '
            'script specifying a username using '
            '--github_username=<Your username>')
    github_username = parsed_args.github_username

    personal_access_token = common.get_personal_access_token()

    g = github.Github(personal_access_token)
    repo = g.get_organization('oppia').get_repo('oppia')
    repo_fork = g.get_repo('%s/oppia' % github_username)

    common.check_blocking_bug_issue_count(repo)
    common.check_prs_for_current_release_are_released(repo)

    python_utils.PRINT('Generating release summary...')
    generate_release_info.main(personal_access_token)

    if not os.path.exists(release_constants.RELEASE_SUMMARY_FILEPATH):
        raise Exception(
            'Release summary file %s is missing. Please re-run '
            'this script.' % release_constants.RELEASE_SUMMARY_FILEPATH)

    current_release_version_number = common.get_current_release_version_number(
        branch_name)
    target_branch = 'update-changelog-for-releasev%s' % (
        current_release_version_number)

    remove_updates_and_delete_branch(repo_fork, target_branch)

    # Opens Credit Form.
    python_utils.PRINT(
        'Note: Make following changes directly to %s and make sure to '
        'save the file after making these changes.' % (
            release_constants.RELEASE_SUMMARY_FILEPATH))

    common.ask_user_to_confirm(
        'Check emails and names for new authors and new contributors in the '
        'file: %s and verify that the emails are '
        'correct through welcome emails sent from [email protected] '
        '(confirm with Sean in case of doubt). Please ensure that you correct '
        'the emails of the form: %s.' % (
            release_constants.RELEASE_SUMMARY_FILEPATH,
            release_constants.INVALID_EMAIL_SUFFIX))
    common.open_new_tab_in_browser_if_possible(
        release_constants.CREDITS_FORM_URL)
    common.ask_user_to_confirm(
        'Check the credits form and add any additional contributors '
        'to the contributor list in the file: %s.' % (
            release_constants.RELEASE_SUMMARY_FILEPATH))
    common.ask_user_to_confirm(
        'Categorize the PR titles in the Uncategorized section of the '
        'changelog in the file: %s, and arrange the changelog '
        'to have user-facing categories on top.' % (
            release_constants.RELEASE_SUMMARY_FILEPATH))
    common.ask_user_to_confirm(
        'Verify each item is in the correct section in the '
        'file: %s and remove trivial changes like "Fix lint errors" '
        'from the changelog.' % (
            release_constants.RELEASE_SUMMARY_FILEPATH))
    common.ask_user_to_confirm(
        'Ensure that all items in changelog in the file: %s '
        'start with a verb in simple present tense.' % (
            release_constants.RELEASE_SUMMARY_FILEPATH))
    common.ask_user_to_confirm(
        'Please save the file: %s with all the changes that '
        'you have made.' % (
            release_constants.RELEASE_SUMMARY_FILEPATH))

    release_summary_lines = get_release_summary_lines()

    update_changelog(
        branch_name, release_summary_lines, current_release_version_number)
    update_authors(release_summary_lines)
    update_contributors(release_summary_lines)
    update_developer_names(release_summary_lines)

    message = (
        'Please check the changes and make updates if required in the '
        'following files:\n1. %s\n2. %s\n3. %s\n4. %s\n' % (
            CHANGELOG_FILEPATH, AUTHORS_FILEPATH, CONTRIBUTORS_FILEPATH,
            ABOUT_PAGE_CONSTANTS_FILEPATH))
    common.ask_user_to_confirm(message)

    create_branch(
        repo, repo_fork, target_branch, github_username,
        current_release_version_number)
Example No. 18
def __init__(self, github_organizations):
    self.github_organizations = github_organizations
    self.github_client = github.Github()
Example No. 19
def main(summary_path, repos_xml_path):
    GITHUB_USERNAME = os.environ['GITHUB_USERNAME']
    GITHUB_TOKEN_FILE = os.environ['GITHUB_TOKEN_FILE']
    GITHUB_ORG = os.environ['GITHUB_ORG']

    with open(summary_path) as f:
        repos = json.load(f)

    with open(GITHUB_TOKEN_FILE) as f:
        token = f.read().strip()

    g = github.Github(GITHUB_USERNAME, token, per_page=50)
    gu = g.get_organization(GITHUB_ORG)
    gh_repos = set()

    # check repo states
    for data in repos.values():
        # 1. we don't add repos with broken metadata but we also don't
        # remove existing ones -- we hope maintainers will fix them,
        # or overlays team will remove them
        #
        # 2. remove repos with unsupported VCS -- this means that
        # upstream has switched, and there's no point in keeping
        # an outdated mirror
        #
        # 3. we can't update repos which are broken to the point of
        # being implicitly removed

        data['x-can-create'] = data['x-state'] in ('GOOD', 'BAD_CACHE')
        data['x-can-update'] = data['x-can-create']
        data['x-should-remove'] = data['x-state'] in ('REMOVED', 'UNSUPPORTED')

    # 0. scan all repos
    to_remove = []
    to_update = []
    for i, r in enumerate(gu.get_repos()):
        sys.stderr.write('\r@ scanning [%-3d/%-3d]' % (i + 1, gu.public_repos))
        if r.name not in repos or repos[r.name]['x-should-remove']:
            to_remove.append(r)
        else:
            gh_repos.add(r.name)
            if repos[r.name]['x-can-update']:
                to_update.append(r)
            repos[r.name]['x-mirror-sources'] = gh_sources(r)
    sys.stderr.write('\n')

    # 1. delete stale repos
    for r in to_remove:
        sys.stderr.write('* removing %s\n' % r.name)
        r.delete()

    # 2. now create new repos :)
    for r, data in sorted(repos.items()):
        if r not in gh_repos and data['x-can-create']:
            sys.stderr.write('* adding %s\n' % r)
            gr = gu.create_repo(
                r,
                description=' '.join(
                    data.get('description', {}).get('en').split())
                or github.GithubObject.NotSet,
                homepage=data.get('homepage') or github.GithubObject.NotSet,
                has_issues=False,
                has_wiki=False)
            repos[r]['x-mirror-sources'] = gh_sources(gr)
            to_update.append(gr)

    # 3. write a new repositories.xml for them
    root = et.Element('repositories')
    root.set('version', '1.0')

    for r, data in sorted(repos.items()):
        if 'x-mirror-sources' not in data:
            continue

        rel = et.Element('repo')
        for attr, val in sorted(data.items(), key=dtd_sort_key):
            if attr.startswith('x-'):
                continue
            elif attr == 'source':  # replace
                for t, url in data['x-mirror-sources']:
                    subel = et.Element('source')
                    subel.set('type', t)
                    subel.text = url
                    rel.append(subel)
            elif attr in ('quality', 'status'):  # attributes
                rel.set(attr, val)
            elif attr in ('name', 'homepage'):  # single-value
                subel = et.Element(attr)
                subel.text = val
                rel.append(subel)
            elif attr in ('description', 'longdescription'):  # lang-dict
                for lang, v in val.items():
                    subel = et.Element(attr)
                    subel.set('lang', lang)
                    subel.text = v
                    rel.append(subel)
            elif attr in ('owner', 'feed'):  # lists
                for v in val:
                    subel = et.Element(attr)
                    if attr == 'owner':
                        for k, subval in v.items():
                            if k == 'type':
                                subel.set(k, subval)
                            else:
                                subsubel = et.Element(k)
                                subsubel.text = subval
                                subel.append(subsubel)
                    else:
                        subel.text = v
                    rel.append(subel)

        root.append(rel)

    xml = et.ElementTree(root)
    with open(repos_xml_path, 'wb') as f:
        f.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        f.write(
            b'<!DOCTYPE repositories SYSTEM "http://www.gentoo.org/dtd/repositories.dtd">\n'
        )
        xml.write(f, 'UTF-8', False)

    print('DELETED_REPOS = %s' % ' '.join(r.name for r in to_remove))
    print('REPOS = %s' % ' '.join(r.name for r in to_update))
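gh_sources is another helper not shown here. Judging from how its return value is written out as <source> elements, a plausible sketch returning (vcs-type, URL) pairs for the mirror:

def gh_sources(repo):
    # hypothetical: advertise the git and https URLs of the GitHub mirror
    return [('git', repo.git_url), ('git', repo.clone_url)]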
Example No. 20
def collect_metrics():
    redis = utils.get_redis_for_cache()
    integration = github.GithubIntegration(config.INTEGRATION_ID,
                                           config.PRIVATE_KEY)

    installations = collections.defaultdict(int)
    repositories_per_installation = collections.defaultdict(int)
    users_per_installation = collections.defaultdict(int)

    LOG.info("GitHub Polling started")

    redis.delete("badges.tmp")

    for installation in utils.get_installations(integration):
        try:
            _id = installation["id"]
            target_type = installation["target_type"]
            account = installation["account"]["login"]

            LOG.info("Get subscription", account=account)
            subscribed = sub_utils.get_subscription(redis,
                                                    _id)["subscription_active"]

            installations[(subscribed, target_type)] += 1

            token = integration.get_access_token(_id).token
            g = github.Github(token,
                              base_url="https://api.%s" % config.GITHUB_DOMAIN)

            if installation["target_type"] == "Organization":
                LOG.info("Get members",
                         install=installation["account"]["login"])
                org = g.get_organization(installation["account"]["login"])
                value = len(list(org.get_members()))

                users_per_installation[(subscribed, target_type,
                                        account)] = value
            else:
                users_per_installation[(subscribed, target_type, account)] = 1

            LOG.info("Get repos", account=account)

            repositories = sorted(g.get_installation(_id).get_repos(),
                                  key=operator.attrgetter("private"))
            for private, repos in itertools.groupby(
                    repositories, key=operator.attrgetter("private")):

                configured_repos = 0
                unconfigured_repos = 0
                for repo in repos:
                    try:
                        repo.get_contents(".mergify.yml")
                        configured_repos += 1
                        redis.sadd("badges.tmp", repo.full_name)
                    except github.GithubException as e:
                        if e.status >= 500:  # pragma: no cover
                            raise
                        unconfigured_repos += 1

                repositories_per_installation[(subscribed, target_type,
                                               account, private,
                                               True)] = configured_repos
                repositories_per_installation[(subscribed, target_type,
                                               account, private,
                                               False)] = unconfigured_repos
        except github.GithubException as e:  # pragma: no cover
            # Ignore rate limit/abuse
            if e.status != 403:
                raise

    LOG.info("GitHub Polling finished")

    # NOTE(sileht): Prometheus can scrape data during our loop, so make it fast
    # to ensure we always expose correct values.
    # Also, we can't know which labels we should delete from the Gauge,
    # which is why we delete all of them and re-add them.
    # prometheus_client doesn't provide an API for that, so we just
    # override _metrics.
    set_gauges(INSTALLATIONS, installations)
    set_gauges(USERS_PER_INSTALLATION, users_per_installation)
    set_gauges(REPOSITORIES_PER_INSTALLATION, repositories_per_installation)

    if redis.exists("badges.tmp"):
        redis.rename("badges.tmp", "badges")

    LOG.info("Gauges and badges cache updated")
Example No. 21
def __init__(self, config):
    token = self.get_token(config)
    self.g = github.Github(token)
    self._repos = config['repos']
    self.monitoring_db = monitoring.GetDatabase('spinbot')
    self.logging = logging.getLogger('github_client_wrapper')
Example No. 22
    def test_maintainer_exists(self):
        lints, hints = linter.lintify(
            {'extra': {
                'recipe-maintainers': ['support']
            }}, conda_forge=True)
        expected_message = ('Recipe maintainer "support" does not exist')
        self.assertIn(expected_message, lints)

        lints = linter.lintify({'extra': {
            'recipe-maintainers': ['isuruf']
        }},
                               conda_forge=True)
        expected_message = ('Recipe maintainer "isuruf" does not exist')
        self.assertNotIn(expected_message, lints)

        expected_message = 'Feedstock with the same name exists in conda-forge'
        # Check that feedstock exists if staged_recipes
        lints = linter.lintify({'package': {
            'name': 'python'
        }},
                               recipe_dir='python',
                               conda_forge=True)
        self.assertIn(expected_message, lints)
        lints = linter.lintify({'package': {
            'name': 'python'
        }},
                               recipe_dir='python',
                               conda_forge=False)
        self.assertNotIn(expected_message, lints)
        # No lint if in a feedstock
        lints = linter.lintify({'package': {
            'name': 'python'
        }},
                               recipe_dir='recipe',
                               conda_forge=True)
        self.assertNotIn(expected_message, lints)
        lints = linter.lintify({'package': {
            'name': 'python'
        }},
                               recipe_dir='recipe',
                               conda_forge=False)
        self.assertNotIn(expected_message, lints)

        # Make sure there's no feedstock named python1 before proceeding
        gh = github.Github(os.environ['GH_TOKEN'])
        cf = gh.get_user('conda-forge')
        try:
            cf.get_repo('python1-feedstock')
            feedstock_exists = True
        except github.UnknownObjectException as e:
            feedstock_exists = False

        if feedstock_exists:
            warnings.warn(
                "There's a feedstock named python1, but tests assume that there isn't"
            )
        else:
            lints = linter.lintify({'package': {
                'name': 'python1'
            }},
                                   recipe_dir="python",
                                   conda_forge=True)
            self.assertNotIn(expected_message, lints)

        # Test bioconda recipe checking
        expected_message = (
            "Recipe with the same name exists in bioconda: "
            "please discuss with @conda-forge/bioconda-recipes.")
        bio = gh.get_user('bioconda').get_repo('bioconda-recipes')
        r = 'samtools'
        try:
            bio.get_dir_contents('recipe/{}'.format(r))
        except github.UnknownObjectException as e:
            warnings.warn(
                "There's no bioconda recipe named {}, but tests assume that there is"
                .format(r))
        else:
            # Check that feedstock exists if staged_recipes
            lints = linter.lintify({'package': {
                'name': r
            }},
                                   recipe_dir=r,
                                   conda_forge=True)
            self.assertIn(expected_message, lints)
            lints = linter.lintify({'package': {
                'name': r
            }},
                                   recipe_dir=r,
                                   conda_forge=False)
            self.assertNotIn(expected_message, lints)
            # No lint if in a feedstock
            lints = linter.lintify({'package': {
                'name': r
            }},
                                   recipe_dir='recipe',
                                   conda_forge=True)
            self.assertNotIn(expected_message, lints)
            lints = linter.lintify({'package': {
                'name': r
            }},
                                   recipe_dir='recipe',
                                   conda_forge=False)
            self.assertNotIn(expected_message, lints)

        r = 'this-will-never-exist'
        try:
            bio.get_dir_contents('recipes/{}'.format(r))
        except github.UnknownObjectException as e:
            lints = linter.lintify({'package': {
                'name': r
            }},
                                   recipe_dir=r,
                                   conda_forge=True)
            self.assertNotIn(expected_message, lints)
        else:
            warnings.warn(
                "There's a bioconda recipe named {}, but tests assume that there isn't"
                .format(r))
Example No. 23
def testGetAuthorizationsSucceedsWhenAutenticatedThroughLoginPassword(
        self):
    g = github.Github(self.login, self.password)
    self.assertListKeyEqual(
        g.get_user().get_authorizations(), lambda a: a.note,
        [None, None, 'cligh', None, None, 'GitHub Android App'])
Example No. 24
def __repo(self) -> github.Repository.Repository:
    return github.Github(self.token).get_repo(self.repo_name)
Example No. 25
# get inputs
workflow_name = os.environ["GITHUB_WORKFLOW"]
polling_interval = _parse_int("POLL_INTERVAL", 60)
continue_after = _parse_int("CONTINUE_AFTER", None)
abort_after = _parse_int("ABORT_AFTER", None)

if "GITHUB_HEAD_REF" in os.environ and len(os.environ["GITHUB_HEAD_REF"]) > 0:
    branch = os.environ["GITHUB_HEAD_REF"]
elif "GITHUB_REF" in os.environ and len(os.environ["GITHUB_REF"]) > 0:
    branch = os.environ["GITHUB_REF"][11:]
else:
    # TODO: should this be main?
    branch = "master"
print("computed branch '%s' for workflow" % branch, flush=True)

gh = github.Github(os.environ["GITHUB_TOKEN"])
repo = gh.get_repo(os.environ["GITHUB_REPOSITORY"])

wf = None
limit = 100
done = 0
for _wf in repo.get_workflows():
    if _wf.name == workflow_name:
        wf = _wf
        break

    done += 1
    if done == limit:
        break

if wf is None:
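The snippet cuts off here. _parse_int is defined earlier in the same script; a plausible sketch, assuming it reads an optional integer from the environment:

import os

def _parse_int(name, default):
    # hypothetical: fall back to the default when the variable is unset or empty
    value = os.environ.get(name, "")
    return int(value) if value else default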
Example No. 26
    parser.add_argument("auth_token")
    parser.add_argument("version", type=version_type)
    subparsers = parser.add_subparsers(dest="subparser_name",
                                       help="Subcommands: check, move_milestone, rn, release")
    subparsers.required = True

    check_parser = subparsers.add_parser("check")
    check_parser.set_defaults(func=check_release_exists)

    milestone_parser = subparsers.add_parser("move_milestone")
    milestone_parser.set_defaults(func=move_milestone)
    milestone_parser.add_argument("next_version", type=version_type)

    rn_parser = subparsers.add_parser("rn")
    rn_parser.set_defaults(func=generate_release_notes)

    release = subparsers.add_parser("release")
    release.set_defaults(func=create_release)
    release.add_argument("commit")

    return parser.parse_args()


if __name__ == "__main__":
    parser = create_parser()

    gh = github.Github(parser.auth_token)
    repo = get_repo(gh, parser.owner, parser.repo)

    parser.func(repo, parser)
Example No. 27
def issue_comment(org_name, repo_name, issue_num, title, comment):
    if not repo_name.endswith("-feedstock"):
        return

    text = comment + title

    issue_commands = [UPDATE_TEAM_MSG, ADD_NOARCH_MSG, UPDATE_CIRCLECI_KEY_MSG,
                      RERENDER_MSG, UPDATE_CB3_MSG]
    send_pr_commands = [ADD_NOARCH_MSG, RERENDER_MSG, UPDATE_CB3_MSG]

    if not any(command.search(text) for command in issue_commands):
        return

    gh = github.Github(os.environ['GH_TOKEN'])
    repo = gh.get_repo("{}/{}".format(org_name, repo_name))
    issue = repo.get_issue(int(issue_num))

    if UPDATE_TEAM_MSG.search(text):
        update_team(org_name, repo_name)
        if UPDATE_TEAM_MSG.search(title):
            issue.edit(state="closed")
        message = textwrap.dedent("""
                Hi! This is the friendly automated conda-forge-webservice.

                I just wanted to let you know that I updated the team with maintainers from master.
                """)
        issue.create_comment(message)

    if UPDATE_CIRCLECI_KEY_MSG.search(text):
        update_circle(org_name, repo_name)
        if UPDATE_CIRCLECI_KEY_MSG.search(title):
            issue.edit(state="closed")
        message = textwrap.dedent("""
                Hi! This is the friendly automated conda-forge-webservice.

                I just wanted to let you know that I updated the circle-ci deploy key and followed the project.
                """)
        issue.create_comment(message)

    if any(command.search(text) for command in send_pr_commands):
        forked_user = gh.get_user().login
        forked_repo = gh.get_user().create_fork(repo)

        with tmp_directory() as tmp_dir:
            feedstock_dir = os.path.join(tmp_dir, repo_name)
            repo_url = "https://{}@github.com/{}/{}.git".format(
                os.environ['GH_TOKEN'], forked_user, repo_name)
            upstream_repo_url = "https://{}@github.com/{}/{}.git".format(
                os.environ['GH_TOKEN'], org_name, repo_name)
            git_repo = Repo.clone_from(repo_url, feedstock_dir)
            forked_repo_branch = 'conda_forge_admin_{}'.format(issue_num)
            upstream = git_repo.create_remote('upstream', upstream_repo_url)
            upstream.fetch()
            new_branch = git_repo.create_head(forked_repo_branch, upstream.refs.master)
            new_branch.checkout()

            changed_anything = False
            extra_msg = ""
            if UPDATE_CB3_MSG.search(text):
                pr_title = "MNT: Update for conda-build 3"
                comment_msg = "updated the recipe for conda-build 3"
                to_close = UPDATE_CB3_MSG.search(title)

                if ADD_NOARCH_MSG.search(text):
                    changed_anything |= make_noarch(git_repo)
                    pr_title += ' and add noarch: python'
                    comment_msg += ' and added `noarch: python`'

                c, cb3_changes = update_cb3(git_repo)
                changed_anything |= c
                if not c:
                    cb3_changes = "There weren't any changes to make for conda-build 3."
                extra_msg = '\n\n' + cb3_changes

                changed_anything |= rerender(git_repo)
            elif ADD_NOARCH_MSG.search(text):
                pr_title = "MNT: Add noarch: python"
                comment_msg = "made the recipe `noarch: python`"
                to_close = ADD_NOARCH_MSG.search(title)

                changed_anything |= make_noarch(git_repo)
                changed_anything |= rerender(git_repo)

            elif RERENDER_MSG.search(text):
                pr_title = "MNT: rerender"
                comment_msg = "rerendered the recipe"
                to_close = RERENDER_MSG.search(title)

                changed_anything |= rerender(git_repo)

            if changed_anything:
                git_repo.git.push("origin", forked_repo_branch)
                pr_message = textwrap.dedent("""
                        Hi! This is the friendly automated conda-forge-webservice.

                        I've {} as instructed in #{}.{}

                        Here's a checklist to do before merging.
                        - [ ] Bump the build number if needed.
                        """).format(comment_msg, issue_num, extra_msg)

                if to_close:
                    pr_message += "\nFixes #{}".format(issue_num)

                pr = repo.create_pull(
                    pr_title, pr_message,
                    "master", "{}:{}".format(forked_user, forked_repo_branch))

                message = textwrap.dedent("""
                        Hi! This is the friendly automated conda-forge-webservice.

                        I just wanted to let you know that I {} in {}/{}#{}.
                        """).format(comment_msg, org_name, repo_name, pr.number)
                issue.create_comment(message)
            else:
                message = textwrap.dedent("""
                        Hi! This is the friendly automated conda-forge-webservice.

                        I've {} as requested, but nothing actually changed.
                        """).format(comment_msg)
                issue.create_comment(message)
                if to_close:
                    issue.edit(state="closed")
Example No. 28
def testBadCredentials(self):
    self.assertRaises(
        github.BadCredentialsException,
        lambda: github.Github("BadUser", "BadPassword").get_user().login)
Example No. 29
def testNoAuthentication(self):
    g = github.Github()
    self.assertEqual(g.get_user("jacquev6").name, "Vincent Jacques")
Example No. 30
def __init__(self):
    self.git_token = get_env('GIT_TOKEN')
    self.git_instance = github.Github(self.git_token)
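get_env is a project helper rather than part of PyGithub; a minimal sketch, assuming it fails fast on a missing variable instead of passing None to github.Github:

import os

def get_env(name):
    # hypothetical: require the variable to be set and non-empty
    value = os.environ.get(name)
    if not value:
        raise RuntimeError('missing required environment variable: ' + name)
    return value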