"old_issue_number": issue["issue_number"],
                    "pipeline_id": pipe.get("id"),
                    "pipeline_name": pipe.get("name"),
                    "position": issue.get("position"),
                    "repo_id": DEST_REPO_ID,
                }

    # assign positions to issues
    issues = _utils.load_issues(DIR)

    for issue in issues:
        repo_id = issue.get("repo_id")

        if repo["id"] == 140626918:
            """
            we skip in atd-data-tech
            Those issue will not have pipelines updated,
            but do need to reconnect the dependencies, etc.
            """
            continue

        issue_number = issue.get("number")
        key = f"{repo_id}${issue_number}"
        issue["migration"]["pipeline"] = issues_with_positions.get(key)
        write_issue(issue, DIR)


if __name__ == "__main__":
    logger = get_logger("apply_pipeline_positions")
    main()
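
# write_issue is imported from the project's shared _utils module and is not shown in
# these examples. A minimal sketch of what it presumably does, based on the
# {repo_name}${number}.json file layout used in download_github below; treat the exact
# signature as an assumption.
import json
import os


def write_issue(issue, directory):
    """Serialize an issue dict back to its JSON file in the working directory."""
    fname = issue.get("path") or os.path.join(
        directory, f"{issue['repo_name']}${issue['number']}.json"
    )
    with open(fname, "w") as fout:
        fout.write(json.dumps(issue))
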
            new_issue = {
                "repo_name": r["name"],
                "repo_id": r["id"],
                "assignees": [person.login for person in issue.assignees],
                "labels": [label.name for label in issue.labels],
                "state": issue.state,
                "url": issue.url,
                "number": issue.number,
                "title": issue.title,
                "body": issue.body,
            }

            if issue.milestone:
                new_issue["milestone"] = issue.milestone.title

            fname = f"{DIR}/{new_issue['repo_name']}${new_issue['number']}.json"

            new_issue["migration"] = {}
            new_issue["path"] = fname

            with open(fname, "w") as fout:
                logger.info(f"{new_issue['repo_name']} {new_issue['number']}")
                fout.write(json.dumps(new_issue))
                issue_count += 1
    logger.info(f"Issues Processed: {issue_count}")

if __name__ == "__main__":
    logger = get_logger("download_github")
    main()
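
# get_logger is another shared helper that is not shown here. A plausible sketch using
# the standard-library logging module; the handler and format are assumptions.
import logging


def get_logger(name):
    """Return a console logger named after the calling script."""
    logger = logging.getLogger(name)
    if not logger.handlers:
        handler = logging.StreamHandler()
        handler.setFormatter(
            logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s")
        )
        logger.addHandler(handler)
        logger.setLevel(logging.INFO)
    return logger
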
                        e["new_issue_number"]
                    })
                else:
                    logger.error(
                        f"Child issue for issue #{issue_number} does not exist: {e}"
                    )

            if payload["add_issues"]:

                res = zenhub_request(DEST_REPO_ID, issue_number, payload)

                if not res:
                    logger.error(f"ERROR: {issue['path']}")
                    issue["migration"]["epic_created"] = False
                else:
                    logger.info(issue["path"])
                    issue["migration"]["epic_created"] = True

                write_issue(issue, DIR)
                epic_count += 1

    logger.info(f"Issues Processed: {issue_count}")
    logger.info(f"Epics Processed: {epic_count}")
    logger.info(f"Child Issues Processed: {child_issue_count}")


if __name__ == "__main__":
    logger = get_logger("convert_epics")
    main()
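
# The zenhub_request wrapper used by convert_epics is defined elsewhere. A hedged
# sketch, assuming it targets ZenHub's update-epic-issues endpoint
# (POST /p1/repositories/:repo_id/epics/:issue_number/update_issues) and reads the
# API token from the environment.
import os

import requests


def zenhub_request(repo_id, issue_number, payload):
    url = (
        f"https://api.zenhub.io/p1/repositories/{repo_id}"
        f"/epics/{issue_number}/update_issues"
    )
    headers = {"X-Authentication-Token": os.getenv("ZENHUB_ACCESS_TOKEN")}
    res = requests.post(url, headers=headers, json=payload)
    try:
        res.raise_for_status()
    except requests.exceptions.HTTPError:
        print(res.text)
        return None
    return res.json()
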
# Example 4
        else:
            print(e)
            return None

    return res.json()


def main():

    with open("releases_with_new_issue_numbers.json", "r") as fin:
        data = json.loads(fin.read())

    for rel_id, rel in data.items():
        if rel.get("state") == "closed":
            continue
        rel.pop("issues")
        rel.pop("state")
        rel["repositories"] = [DEST_REPO_ID]
        rel.pop("release_id")
        rel.pop("created_at")
        rel.pop("closed_at")
        res = zenhub_request(DEST_REPO_ID, rel)

        if not res:
            logger.error(f"ERROR: {rel_id}")
        else:
            logger.info(rel_id)


if __name__ == "__main__":
    logger = get_logger("delete")
    main()
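
# The truncated zenhub_request above presumably wraps ZenHub's create-release-report
# endpoint (POST /p1/repositories/:repo_id/reports/release); a hedged sketch, with the
# token handling assumed.
import os

import requests


def zenhub_request(repo_id, payload):
    url = f"https://api.zenhub.io/p1/repositories/{repo_id}/reports/release"
    headers = {"X-Authentication-Token": os.getenv("ZENHUB_ACCESS_TOKEN")}
    res = requests.post(url, headers=headers, json=payload)
    try:
        res.raise_for_status()
    except requests.exceptions.HTTPError as e:
        print(e)
        return None
    return res.json()
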
            child_issue_count += 1

            if not new_issue_number:
                # child issue has not been processed, it's probably a closed issue
                # which we're not migrating
                missing_epic_issues.append({
                    "repo_id": child_issue["repo_id"],
                    "issue_number": child_issue["issue_number"],
                })

        # write updated issue to file
        issue["migration"]["epics_staged"] = True
        write_issue(issue, DIR)
        epic_count += 1

    logger.info(f"Issues Processed: {issue_count}")
    logger.info(f"Epics Processed: {epic_count}")
    logger.info(f"Child Issues Processed: {child_issue_count}")

    with open(MISSING_CHILDREN_FILE, "w") as fout:
        fout.write(json.dumps(missing_epic_issues))


# logger.info(f"Issues Processed: {issue_count}")

if __name__ == "__main__":
    logger = get_logger("stage_epics")
    main()
    for issue in issues:

        if issue.get("is_epic") and issue.get("repo_id") != 140626918:
            """
            we skip existing atd-data-tech epics.
            These issues already exist, but we need to connect the dependencies, etc.
            """

            # new issue number of issue that will be converted to epic
            issue_number = issue["migration"].get("new_issue_number")
            
            payload = {"issues": []}

            res = zenhub_request(DEST_REPO_ID, issue_number, payload)

            if not res:
                logger.error(f"ERROR: {issue['path']}")
                issue["migration"]["epic_created"] = False
            else:
                logger.info(issue["path"])
                issue["migration"]["epic_created"] = True

            write_issue(issue, DIR)
            issue_count += 1

    logger.info(f"Issues Processed: {issue_count}")

if __name__ == "__main__":
    logger = get_logger("creat_epics")
    main()
    #     for relid in rels_temp.keys():
    #         name = rels_temp[relid]["title"]
    #         rel_map[name] = relid

    for rel_id in rels:
        if rels[rel_id]["state"] == "closed":
            continue

        # title = rels[rel_id]["title"] # this mapping is only needed in test
        # try:
        #     mapped_id = rel_map[title]
        # except:
        #     pdb.set_trace() # emoji are killing this // will not be an issue in prod
        #     print("emoji fail")
        payload = {"add_issues": [], "remove_issues": []}

        if rels[rel_id]["issues"]:
            payload["add_issues"] = rels[rel_id]["issues"]
            res = zenhub_request(DEST_REPO_ID, payload)

            if not res:
                logger.error(f"ERROR: {payload}")

            else:
                logger.info(payload)


if __name__ == "__main__":
    logger = get_logger("set_releases")
    main()
# Example 8
            print(res.text)
            return None

        if res.status_code == 403:
            print(res.text)
            return None

        else:
            print(e)
            return None

    return res.json()


def main():

    dp = load(DEPEND_FILE)

    for d in dp:
        res = zenhub_request(d)

        if not res:
            logger.error(f"ERROR: {d}")
        else:
            logger.info(d)


if __name__ == "__main__":
    logger = get_logger("update_dependencies")
    main()
# Example 9
            But we do need to reference these issues
            to connect the dependencies and epics to new issues.
            """
            issue["migration"]["created_github"] = True
            issue["migration"]["new_issue_number"] = issue["number"]
            write_issue(issue, DIR)
            issue_count += 1
            continue
        else:
            res = create_issue(issue, DEST_REPO)
            issue["migration"]["created_github"] = True
            issue["migration"]["new_issue_number"] = res["number"]
            issue_count += 1
            created_count += 1
            write_issue(issue, DIR)

    logger.info(f"Issues Processed: {issue_count}")
    logger.info(f"Issues Created: {created_count}")

    # issue["migration"]["source_body_update"] = update_body(
    #     issue["body"], res["number"], DEST_REPO
    # )
    # res = update_issue(issue)
    # issue["migration"]["updated_source_github"] = True
    # write_issue(issue, DIR)


if __name__ == "__main__":
    logger = get_logger("github_create")
    main()
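
# create_issue is defined elsewhere; a hedged sketch using the GitHub REST API
# (POST /repos/{owner}/{repo}/issues). DEST_REPO is assumed to be an "owner/name"
# string and the token to come from the environment; github_create above only relies
# on the returned dict containing the new issue "number".
import os

import requests


def create_issue(issue, dest_repo):
    url = f"https://api.github.com/repos/{dest_repo}/issues"
    headers = {"Authorization": f"token {os.getenv('GITHUB_TOKEN')}"}
    payload = {
        "title": issue["title"],
        "body": issue["body"],
        "labels": issue["migration"].get("labels", []),
        "assignees": issue.get("assignees", []),
    }
    res = requests.post(url, headers=headers, json=payload)
    res.raise_for_status()
    return res.json()
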
        # disgusting hack because i'm tired of writing this migration
        # we have to be able to distinguish between map and map/append
        # label actions. i don't want to refactor all the code so
        # we're squeezing the action into the destination label_name
        label_dest = label_dest + "_&_" + label_action
        lookup[label_src] = label_dest

    return lookup


def main():
    with open(LABEL_FILE, "r") as fin:
        reader = csv.DictReader(fin)
        label_map = [row for row in reader if "map" in row["action"]]

        label_lookup = build_lookup(label_map)

    issues = _utils.load_issues(DIR)

    for issue in issues:
        labels = issue.get("labels")
        labels = map_labels(labels, label_lookup)
        labels = map_repos(labels, issue["repo_name"], REPO_MAP)
        issue["migration"]["labels"] = labels
        write_issue(issue, DIR)


if __name__ == "__main__":
    logger = get_logger("apply_labels")
    main()
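
# map_labels is defined elsewhere; a hedged sketch of how it might unpack the "_&_"
# action suffix that build_lookup squeezes into the destination label name. The
# map-vs-append semantics here are an assumption, shown only to illustrate the encoding.
def map_labels(labels, lookup):
    mapped = []
    for label in labels:
        if label not in lookup:
            mapped.append(label)
            continue
        dest, action = lookup[label].split("_&_")
        if action == "map":
            # map: replace the source label with the destination label
            mapped.append(dest)
        else:
            # map/append: keep the source label and add the destination label
            mapped.extend([label, dest])
    return mapped
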
# Example 11
            print(res.text)
            return None

        else:
            print(e)
            return None

    return res.json()


def main():

    all_dependencies = []

    for repo in SOURCE_REPOS:
        repo_id = repo["id"]
        res = zenhub_request(repo_id)

        if res is None:
            logger.error(repo_id)
            continue

        all_dependencies.extend(res["dependencies"])

    with open(outfile, "w") as fout:
        fout.write(json.dumps(all_dependencies))


if __name__ == "__main__":
    logger = get_logger("get_dependencies")
    main()
# Example 12
    issues = _utils.load_issues(DIR)

    # get the new issue number of every issue in a release
    for issue in issues:
        repo_id = issue.get("repo_id")
        issue_number = issue.get("number")
        key = f"{repo_id}${issue_number}"

        if key in lookup:
            new_issue_number = issue["migration"].get("new_issue_number")

            if not new_issue_number:
                # issue has apparently not been created in github
                logger.error(key)
                raise Exception(f"New {issue_number} does not exist in github!")

            lookup[key]["new_issue_number"] = new_issue_number

    # build new releases file
    releases = update_releases(releases, lookup)

    releases = drop_null_issues(releases)

    with open(DEST_FILE, "w") as fout:
        fout.write(json.dumps(releases))


if __name__ == "__main__":
    logger = get_logger("process_releases")
    main()
            links = requests.utils.parse_header_links(res.headers["Link"])
        except KeyError:
            # if there's only one page there will be no link headers
            break

        if links:
            for link in links:
                if link.get("rel") == "next":
                    url = link.get("url")
                    print(url)
                elif link.get("rel") == "last":
                    last_url = link.get("url")

    return milestones


def main():
    issues = _utils.load_issues(DIR)
    
    dest_milestones = get_milestones_from_repo(DEST_REPO)

    issues, update_count = update_milestones(issues, dest_milestones)

    issue_count = write_issues(issues, DIR)
    logger.info(f"Issues Processed: {issue_count}")
    logger.info(f"Milestones Updated: {update_count}")

if __name__ == "__main__":
    logger = get_logger("set_milestones")
    main()
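
# The top of get_milestones_from_repo is truncated above. A hedged sketch of the full
# pagination loop, assuming the GitHub REST API (GET /repos/{owner}/{repo}/milestones)
# and a token read from the environment; DEST_REPO is assumed to be an "owner/name"
# string.
import os

import requests


def get_milestones_from_repo(repo):
    url = f"https://api.github.com/repos/{repo}/milestones?state=all&per_page=100"
    headers = {"Authorization": f"token {os.getenv('GITHUB_TOKEN')}"}
    milestones = []

    while url:
        res = requests.get(url, headers=headers)
        res.raise_for_status()
        milestones.extend(res.json())

        try:
            links = requests.utils.parse_header_links(res.headers["Link"])
        except KeyError:
            # if there's only one page there will be no link headers
            break

        # follow the "next" link until no further page is advertised
        url = None
        for link in links:
            if link.get("rel") == "next":
                url = link.get("url")

    return milestones
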
# Example 14
        if issue["migration"].get("zenhub_downloaded"):
            continue

        time.sleep(.6)  # zenhub rate limit is 100 requests/minute

        try:
            issue = get_zenhub_issue(issue)
        except Exception:
            issue["migration"]["zenhub_downloaded"] = False
            logger.error(f"Error: {issue['path']}")
            error_count += 1
            continue

        if issue["is_epic"]:
            get_epic_issues(issue)

        fname = issue["path"]

        with open(fname, "w") as fout:
            logger.info(f"{issue['repo_name']} {issue['number']}")
            fout.write(json.dumps(issue))
            issue_count += 1

    logger.info(f"Issues Processed: {issue_count}")
    logger.info(f"Errors: {error_count}")


if __name__ == "__main__":
    logger = get_logger("download_zenhub")
    main()
# Example 15

def main():
    issues = _utils.load_issues(DIR)
    
    issue_count = 0

    for issue in issues:
        if (
            not issue.get("migration").get("comments_retreived")
            and issue.get("repo_id") != 140626918
        ):
            """
            we skip comments atd-data-tech
            The issues already exist, but we need to connect the dependencies, etc.
            """
            issue["comments"] = get_comments(issue["repo_name"], issue["number"])
            issue["comments"] = parse_comments(issue["comments"])
            issue["migration"]["comments_retreived"] = True

        logger.info(issue["number"])
        write_issue(issue, DIR)
        issue_count += 1

    logger.info(f"Issues Processed: {issue_count}")


if __name__ == "__main__":
    logger = get_logger("download_comments")
    main()
# Example 16
    dest_labels = get_labels_from_repo(DEST_REPO)

    dest_labels = [label["name"] for label in dest_labels]

    # get all labels from source repos
    source_labels = get_labels_from_source_repos(SOURCE_REPOS)

    # get all labels from label file
    file_labels = get_labels_from_csv(LABEL_FILE)

    # merge label definitions from file and source repos
    source_labels.update(file_labels)

    # get all labels from labeled issues
    issue_labels = get_labels_from_issues(DIR)

    create_labels = get_missing_labels(dest_labels, source_labels, issue_labels)

    for label in create_labels:
        try:
            create_label(label, DEST_REPO)
            logger.info(f"Created {label['name']}")
        except Exception:
            logger.error(f"ERROR: {label['name']}")


if __name__ == "__main__":
    logger = get_logger("create_labels")
    main()
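
# create_label is defined elsewhere; a hedged sketch using the GitHub REST API
# (POST /repos/{owner}/{repo}/labels). Label dicts are assumed to carry at least a
# "name" and, where available, a "color".
import os

import requests


def create_label(label, dest_repo):
    url = f"https://api.github.com/repos/{dest_repo}/labels"
    headers = {"Authorization": f"token {os.getenv('GITHUB_TOKEN')}"}
    payload = {"name": label["name"], "color": label.get("color", "ededed")}
    res = requests.post(url, headers=headers, json=payload)
    res.raise_for_status()
    return res.json()
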
        if key in d_lookup:
            new_issue_number = issue["migration"].get("new_issue_number")

            if not new_issue_number:
                # issue has apparently not been created in github
                logger.error(key)
                raise Exception(
                    f"New issue number for {issue_number} does not exist in github!"
                )

            d_lookup[key]["new_issue_number"] = new_issue_number

        else:
            # issue must be closed, because we haven't downloaded it
            missing_dependency_issues.append(key)

    # build new dependencies file
    depends = build_new_dependencies(depends, d_lookup)

    depends = drop_null_dependencies(depends)

    with open(DEST_FILE, "w") as fout:
        fout.write(json.dumps(depends))

    with open(MISSING_DEPEND_FILE, "w") as fout:
        fout.write(json.dumps(missing_dependency_issues))


if __name__ == "__main__":
    logger = get_logger("process_dependencies")
    main()
# Example 18
    issues = _utils.load_issues(DIR)
    sorted_issues = sort_issues(issues)

    for issue_element in sorted_issues:
        issue = issue_element.get("data")
        issue_number = issue.get("migration").get("new_issue_number")
        pos = issue.get("migration").get("pipeline").get("position")

        # TODO: TEST ONLY. Prod just use existing pipeline ID
        # pipe_id = issue.get("migration").get("pipeline").get("pipeline_id")
        pipe_name = issue.get("migration").get("pipeline").get("pipeline_name")
        pipe_id = replace_pipe(test_pipes, pipe_name)

        payload = {"pipeline_id": pipe_id, "position": pos}

        res = zenhub_request(DEST_REPO_ID, WORKSPACE_ID, issue_number, payload)

        if not res:
            logger.error(f"ERROR: {issue['path']}")
            issue["migration"]["pipeline_processed"] = False
        else:
            logger.info(issue["path"])
            issue["migration"]["pipeline_processed"] = True

        write_issue(issue, DIR)


if __name__ == "__main__":
    logger = get_logger("update_pipelines")
    main()
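
# A hedged sketch of the four-argument zenhub_request wrapper used above, assuming
# ZenHub's move-issue-between-pipelines endpoint
# (POST /p2/workspaces/:workspace_id/repositories/:repo_id/issues/:issue_number/moves)
# and a token read from the environment.
import os

import requests


def zenhub_request(repo_id, workspace_id, issue_number, payload):
    url = (
        f"https://api.zenhub.io/p2/workspaces/{workspace_id}"
        f"/repositories/{repo_id}/issues/{issue_number}/moves"
    )
    headers = {"X-Authentication-Token": os.getenv("ZENHUB_ACCESS_TOKEN")}
    res = requests.post(url, headers=headers, json=payload)
    try:
        res.raise_for_status()
    except requests.exceptions.HTTPError:
        print(res.text)
        return None
    # the caller only checks truthiness, so return the Response object itself
    return res
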