def rescan():
    """
    Re-scan GitHub repositories to find pull requests that need OSPR issues
    on JIRA, and do not have them. If this method finds pull requests that are
    missing JIRA issues, it will automatically process those pull requests
    just as though the pull request was newly created.

    Note that this rescan functionality is the reason why
    :func:`~openedx_webhooks.tasks.github.pull_request_opened`
    must be idempotent. It could run many times over the same pull request.
    """
    if request.method == "GET":
        # Just render the form for kicking off a rescan.
        return render_template("github_rescan.html")

    repo = request.form.get("repo") or "edx/edx-platform"
    inline = request.form.get("inline", False)
    if (repo == "all" or repo.startswith("all:")) and inline:
        # Rescanning every repository synchronously would tie up the web
        # process for far too long.
        return "Don't be silly."

    if inline:
        # Run the rescan in this request and return its result directly,
        # instead of queueing a Celery task.
        return jsonify(rescan_repository(repo))

    if repo == "all":
        # Queue a rescan of every repository listed in the repos file.
        repos = get_repos_file(session=github).keys()
        workflow = group(
            rescan_repository.s(repository, wsgi_environ=minimal_wsgi_environ())
            for repository in repos
        )
        group_result = workflow.delay()
        group_result.save()  # this is necessary for groups, for some reason
        status_url = url_for("tasks.group_status", group_id=group_result.id, _external=True)
    elif repo.startswith("all:"):
        # Queue a rescan of every repository in a GitHub organization,
        # e.g. "all:edx".
        org = repo[len("all:"):]
        org_url = "https://api.github.com/orgs/{org}/repos".format(org=org)
        repo_names = [
            repo_data["full_name"] for repo_data in paginated_get(org_url)
        ]
        workflow = group(
            rescan_repository.s(repository, wsgi_environ=minimal_wsgi_environ())
            for repository in repo_names
        )
        group_result = workflow.delay()
        group_result.save()  # this is necessary for groups, for some reason
        status_url = url_for("tasks.group_status", group_id=group_result.id, _external=True)
    else:
        # Queue a rescan of a single repository.
        result = rescan_repository.delay(repo, wsgi_environ=minimal_wsgi_environ())
        status_url = url_for("tasks.status", task_id=result.id, _external=True)

    resp = jsonify({"message": "queued", "status_url": status_url})
    resp.status_code = 202
    resp.headers["Location"] = status_url
    return resp
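

# The tasks queued above are passed ``wsgi_environ=minimal_wsgi_environ()`` so
# that ``url_for(..., _external=True)`` can still build URLs inside the Celery
# worker, where no real request exists.  The sketch below only illustrates
# that idea (hence the hypothetical ``_example_`` name), not the project's
# actual helper: it assumes the worker needs just the handful of environ keys
# Werkzeug uses to reconstruct the host, port, and scheme.
def _example_minimal_wsgi_environ():
    keep = {
        "HTTP_HOST", "SERVER_NAME", "SERVER_PORT", "REQUEST_METHOD",
        "SCRIPT_NAME", "PATH_INFO", "QUERY_STRING", "wsgi.url_scheme",
    }
    return {key: value for key, value in request.environ.items() if key in keep}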
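

# The 202 responses above point clients at the "tasks.status" and
# "tasks.group_status" endpoints for polling.  The sketch below shows what
# such endpoints could look like using only documented Celery result APIs;
# the hypothetical ``_example_`` functions stand in for the real views in the
# tasks blueprint.  It also illustrates why ``group_result.save()`` is needed:
# ``GroupResult.restore()`` can only look up groups that were saved to the
# result backend.
from celery.result import AsyncResult, GroupResult

def _example_task_status(task_id):
    result = AsyncResult(task_id)
    return jsonify({"task_id": task_id, "state": result.state})

def _example_group_status(group_id):
    group_result = GroupResult.restore(group_id)
    if group_result is None:
        return jsonify({"error": "unknown group"}), 404
    return jsonify({
        "group_id": group_id,
        "completed": group_result.completed_count(),
        "total": len(group_result.results),
    })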
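

# The "all:<org>" branch relies on ``paginated_get`` to walk GitHub's
# paginated list endpoints.  A self-contained sketch of that pattern using
# ``requests`` and the ``Link`` header -- the project's real helper likely
# adds authentication and error handling on top of this:
import requests

def _example_paginated_get(url, session=None):
    """Yield every item from a paginated GitHub API endpoint."""
    session = session or requests.Session()
    while url:
        response = session.get(url)
        response.raise_for_status()
        for item in response.json():
            yield item
        # ``requests`` parses the Link header into ``response.links``.
        url = response.links.get("next", {}).get("url")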
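

# The docstring stresses that ``pull_request_opened`` must be idempotent,
# because a rescan replays it over pull requests that were usually already
# handled when they were first opened.  The sketch below only illustrates
# that shape; the real ``rescan_repository`` task is more involved, and the
# assumption that ``pull_request_opened`` returns a (JIRA key, changed) pair
# is exactly that -- an assumption made for this example.
from openedx_webhooks.tasks.github import pull_request_opened

def _example_rescan_repository(repo):
    """Re-run the "opened" handling for every open pull request in ``repo``."""
    changed = {}
    pulls_url = "https://api.github.com/repos/{repo}/pulls".format(repo=repo)
    for pull_request in paginated_get(pulls_url):
        # Calling this repeatedly should be safe: it only creates a JIRA
        # issue (or posts comments) when that work has not already been done.
        issue_key, anything_changed = pull_request_opened(pull_request)
        if anything_changed:
            changed[pull_request["number"]] = issue_key
    return changed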