Ejemplo n.º 1
0
 def test_can_get_filename_for_cache(self):
     """build_file_name should map a project URL onto the expected cache-file path."""
     vsts = VstsInfo(None, None)
     vsts.config['DEFAULT']['vsts_instance_base'] = "company.visualstudio.com"
     vsts.config['DEFAULT']['cash_prefix'] = "vsts"
     request_url = f"https://{vsts.instance_base}/my_project?foo=bar&foofoo=barbar"
     # Query string's '?' is encoded as "(qm)" in the cache file name.
     expected = (
         f"{vsts.cache_folder}\\{vsts.cash_prefix}"
         ".my_project(qm)foo=bar&foofoo=barbar.json"
     )
     self.assertEqual(vsts.build_file_name(request_url), expected)
Ejemplo n.º 2
0
 def test_can_get_url_from_config(self):
     """A VSTS instance base URL should be readable from configuration."""
     instance_base = VstsInfo(None, None).instance_base
     self.assertNotEqual(instance_base, None)
Ejemplo n.º 3
0
 def test_personal_access_token_starts_with_colon(self):
     """The personal access token is expected to carry a ':' prefix."""
     token = VstsInfo(None, None).personal_access_token
     self.assertTrue(token.startswith(':'))
Ejemplo n.º 4
0
 def test_get_request_settings(self):
     """get_request_settings should expose the configured instance value."""
     vsts = VstsInfo(None, None)
     request_settings = vsts.get_request_settings()
     self.assertEqual(request_settings['instance'], vsts.instance)
Ejemplo n.º 5
0
 def test_has_cash_prefix(self):
     """The cache-file prefix (spelled 'cash' in the API) must be configured."""
     prefix = VstsInfo(None, None).cash_prefix
     self.assertNotEqual(prefix, None)
Ejemplo n.º 6
0
    # Load script settings from the local config file.
    CONFIG = configparser.ConfigParser()
    CONFIG.read_file(open('default.cfg'))

    # Config-file strings that are interpreted as boolean True.
    IS_TRUE = [
        'True', '1', 't', 'y', 'yes', 'yeah', 'yup', 'certainly', 'uh-huh'
    ]

    # [RepoSync] section: threading flag, git server names, and local paths.
    RUN_MULTITHREADED = CONFIG['RepoSync']['RunMultiThreaded'] in IS_TRUE
    SERVER_NO_IP = CONFIG['RepoSync']['ServerNoIp']
    SERVER_IP = CONFIG['RepoSync']['ServerIp']
    GIT_ROOT_FOLDER_PATH = CONFIG['RepoSync']['GitRootFolderPath']
    SAVE_SCRIPTS_FOLDER = CONFIG['RepoSync']['SaveScriptsFolderPath']
    IGNORE_VSTS_CACHE = CONFIG['RepoSync']["IgnoreVstsCache"] in IS_TRUE

    # NOTE(review): the original comment said "always ignore the cache" for
    # this script, but the flag actually comes from the config file above —
    # confirm which behavior is intended.
    VSTS = VstsInfo(None, None, ignore_cache=IGNORE_VSTS_CACHE)

    # Fetch the full project list from VSTS via the projects/teams/users worker.
    PTU_WORKER = ProjectsTeamsUsersWorker(VSTS.get_request_settings(),
                                          VSTS.project_whitelist, VSTS)
    PROJECTS_URL = PTU_WORKER.get_vsts_projects_url()
    RAW = PTU_WORKER.vsts.make_request(PROJECTS_URL)
    PROJECTS = RAW["value"]

    REPO_WORKER = RepositoriesWorker(VSTS.get_request_settings(), VSTS)

    # Enumerate git repositories already cloned under the local root folder.
    REPO_SYNC = RepoSync(GIT_ROOT_FOLDER_PATH, SAVE_SCRIPTS_FOLDER,
                         SERVER_NO_IP, SERVER_IP)
    LOCAL_REPOS = REPO_SYNC.get_local_git_repos(GIT_ROOT_FOLDER_PATH)

    # Presumably accumulates repos addressed by the no-ip server name;
    # populated later, outside this view — TODO confirm.
    REPO_NOIP_STORE = []
Ejemplo n.º 7
0
        starts doing the crawling work
        """
        graph = GraphBuilder().GetNewGraph()
        proj = self.map_and_save_project(raw_data, graph)
        if proj is not None:
            self.add_teams_to_repo(proj, graph)
        print("Finished Adding Projects Teams and Users")

if __name__ == '__main__':
    print("starting Projects Teams and Users")
    # Set to False for easier debugging, but it is slower.
    RUN_MULTITHREADED = True

    # Ensure graph schema uniqueness constraints exist before inserting nodes.
    GRAPH = GraphBuilder()
    GRAPH.create_unique_constraints()

    # ignore_cache=True forces fresh data from VSTS instead of the local cache.
    VSTS = VstsInfo(None, None, ignore_cache=True)

    # TODO: clean up this signature mess and just pass in VSTS
    WORKER = ProjectsTeamsUsersWorker(VSTS.get_request_settings(), VSTS.project_whitelist, VSTS)
    PROJECTS_URL = WORKER.get_vsts_projects_url()
    RAW = WORKER.vsts.make_request(PROJECTS_URL)
    PROJECTS = RAW["value"]

    # Crawl each project, either across a pool of 5 workers or serially.
    if RUN_MULTITHREADED:
        with Pool(5) as p:
            p.map(WORKER.crawl, PROJECTS)
    else:
        for PROJ in PROJECTS:
            WORKER.crawl(PROJ)
Ejemplo n.º 8
0
                proj_tx.commit()

            repo.BelongsTo.add(proj)
            print("Adding Repo: ")
            print(repo.Name)
            transaction = graph.begin()
            transaction.merge(repo)
            transaction.graph.push(repo)
        print("Finished mapping repos")


if __name__ == '__main__':
    print("starting Repositories Crawl")
    # Set to False for easier debugging, but it is slower.
    # Renamed from lowercase run_multithreaded to UPPER_SNAKE_CASE for
    # consistency with the sibling crawler scripts and PEP 8 constants.
    RUN_MULTITHREADED = True

    # Ensure graph schema uniqueness constraints exist before inserting nodes.
    GRAPH = GraphBuilder()
    GRAPH.create_unique_constraints()

    # If you feel your cache is up to date, then set ignore_cache to False.
    VSTS = VstsInfo(None, None, ignore_cache=True)
    # NOTE(review): PULL_REQUEST_STATUS is never read in this block — confirm
    # whether anything downstream relies on it before removing.
    PULL_REQUEST_STATUS = "Completed"
    WORKER = RepositoriesWorker(VSTS.get_request_settings(), VSTS)

    # Crawl every whitelisted project, across a pool of 5 workers or serially.
    if RUN_MULTITHREADED:
        with Pool(5) as p:
            p.map(WORKER.crawl, VSTS.project_whitelist)
    else:
        for PROJ in VSTS.project_whitelist:
            WORKER.crawl(PROJ)
Ejemplo n.º 9
0
            ids.append(_id.get("Id"))
        pull_reqs = None
        return ids

    def add_pull_request_work_items(self, project_name):
        """
        Crawl the work items of every pull request in *project_name*.

        Fans out over each repository id in the project and, within each
        repository, over each pull request id, delegating to self.crawl.
        """
        print("Getting work items for project " + project_name)
        for repository_id in self.get_repository_ids(project_name):
            for pr_id in self.get_pull_request_ids(repository_id):
                self.crawl(repository_id, pr_id)


if __name__ == '__main__':
    print("starting Work Items linked to Pull Requests")
    # Set to False for easier debugging, but it is slower.
    RUN_MULTITHREADED = False

    # ignore_cache=False allows previously cached VSTS responses to be reused.
    VSTS = VstsInfo(None, None, ignore_cache=False)
    # NOTE(review): the class name misspells "Request" — fix at its definition site.
    WORKER = PullReqeustWorkItemsWorker(VSTS.get_request_settings(), VSTS)
    if RUN_MULTITHREADED:
        with Pool(5) as p:
            p.map(WORKER.add_pull_request_work_items, VSTS.project_whitelist)
    else:
        for proj_name in VSTS.project_whitelist:

            WORKER.add_pull_request_work_items(proj_name)
Ejemplo n.º 10
0
    def build_relationship(self, graph, raw_link):
        """
        Look up the source and target work items of *raw_link* and return
        a Relationship joining their underlying graph nodes, with the
        link's properties copied onto it.
        """
        src_item = self.get_work_item(raw_link.get("sourceId"), graph)
        dst_item = self.get_work_item(raw_link.get("targetId"), graph)
        rel_type = self.parse_link_type(raw_link.get("linkType"))
        relationship = Relationship(
            src_item.__ogm__.node, rel_type, dst_item.__ogm__.node)
        self.set_link_props(relationship, raw_link)
        return relationship


if __name__ == '__main__':
    print("starting WorkItemLinks")
    # Set to False for easier debugging, but it is slower.
    RUN_MULTITHREADED = False

    # Ensure graph schema uniqueness constraints exist before inserting nodes.
    GRAPH = GraphBuilder()
    GRAPH.create_unique_constraints()

    VSTS = VstsInfo(None, None)
    WORKER = WorkItemLinksWorker(VSTS)

    # Crawl per project across a pool of 5 workers, or all projects serially.
    if RUN_MULTITHREADED:
        with Pool(5) as p:
            p.map(WORKER.crawl, VSTS.project_whitelist)
    else:
        WORKER.crawl_projects(VSTS.project_whitelist)