def crawl(self, repository_id, pull_request_id):
    """ Entry point for this class """
    if (repository_id is None) or (pull_request_id is None):
        print("Could not get work items: one of the IDs was None")
        print(repository_id)
        print(pull_request_id)
        return
    graph = GraphBuilder().GetNewGraph()
    pull_request = PullRequest.select(graph, pull_request_id).first()
    if pull_request is None:
        print("Could not continue, pull request was not in the db")
        return
    url = self.pull_request_workitems_url(repository_id, pull_request.Id)
    data = self.get_data(url)
    if data is None:
        return
    if "value" not in data:
        logging.info("No work items linked")
        return
    for raw in data["value"]:
        work_item = self.make_work_item(raw)
        if work_item is not None:
            # Relate the work item to its pull request, resolve any remaining
            # fields, then merge and push the node to Neo4j.
            self.link_to_pull_request(work_item, pull_request)
            self.fill_in_the_rest(work_item, graph)
            transaction = graph.begin()
            transaction.merge(work_item)
            transaction.graph.push(work_item)
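# The helper methods used above (pull_request_workitems_url, get_data) are not
# shown in this section. Below is a minimal sketch of what they might look like,
# assuming the VSTS "pull request work items" REST endpoint and a requests-based
# client; the exact URL shape, auth handling, and attribute names (instance,
# api_version, personal_access_token) are assumptions, not confirmed code from
# this repo. They are shown standalone here but would live on the same crawler class.
import logging

import requests

def pull_request_workitems_url(self, repository_id, pull_request_id):
    """Build the (assumed) VSTS URL listing work items linked to a pull request."""
    return ("%s/DefaultCollection/_apis/git/repositories/%s/pullRequests/%s/workitems"
            "?api-version=%s"
            % (self.instance, repository_id, pull_request_id, self.api_version))

def get_data(self, url):
    """Fetch JSON from VSTS, returning None on a non-200 response (assumed helper)."""
    response = requests.get(url, auth=("", self.personal_access_token))
    if response.status_code != 200:
        logging.warning("Request to %s failed with status %s", url, response.status_code)
        return None
    return response.json()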
def crawl(self, project_name):
    """ Gets Repositories for a given project """
    url = ("%s/DefaultCollection/%s/_apis/git/repositories?api-version=%s"
           % (self.instance, project_name, self.api_version))
    data = self.vsts.make_request(url)
    for r in data["value"]:
        graph = GraphBuilder().GetNewGraph()
        repo = Repository()
        repo.Id = r.get("id")
        repo.Name = r.get("name")
        repo.Url = r.get("url")

        raw_proj = r.get("project")
        proj = Project()
        proj.Id = raw_proj.get("id")
        proj.Name = raw_proj.get("name")
        proj.Url = raw_proj.get("url")

        # todo: may not need to do this.
        # Create the Project node only if it is not already in the graph.
        repo_proj = Project.select(graph, proj.Id).first()
        if repo_proj is None:
            proj_tx = graph.begin()
            proj_tx.create(proj)
            proj_tx.commit()

        repo.BelongsTo.add(proj)
        print("Adding Repo: %s" % repo.Name)
        transaction = graph.begin()
        transaction.merge(repo)
        transaction.graph.push(repo)
    print("Finished mapping repos")
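# Repository, Project and GraphBuilder are defined elsewhere in the repo. Below
# is a minimal sketch of what they might look like, assuming py2neo's OGM with
# "Id" as the primary key; the property names mirror the attributes used above,
# but the real classes and connection settings may differ.
from py2neo import Graph
from py2neo.ogm import GraphObject, Property, RelatedTo

class Project(GraphObject):
    __primarykey__ = "Id"
    Id = Property()
    Name = Property()
    Url = Property()

class Repository(GraphObject):
    __primarykey__ = "Id"
    Id = Property()
    Name = Property()
    Url = Property()
    BelongsTo = RelatedTo(Project)

class GraphBuilder(object):
    """Assumed factory that opens a connection to the Neo4j instance."""
    def GetNewGraph(self):
        # Connection URI and credentials are placeholders, not the project's config.
        return Graph("bolt://localhost:7687", auth=("neo4j", "password"))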