def upsert_change_request(
    repository: Repository, provider: str, external_id: str, data: dict = None
) -> Response:
    """Create or update a change request via the internal API.

    Serializes concurrent hook deliveries for the same change request with
    a redis lock, then PUTs to the existing change request matching
    (repository, provider, external_id), or POSTs a new one.

    :param repository: repository the change request belongs to
    :param provider: upstream provider name (e.g. "github")
    :param external_id: the provider's identifier for the change request
    :param data: optional extra fields to send; never mutated
    :return: the internal API client's Response
    """
    lock_key = "hook:cr:{repo_id}:{provider}:{cr_xid}".format(
        repo_id=repository.id, provider=provider, cr_xid=external_id
    )
    with redis.lock(lock_key):
        # copy so the caller's dict is never mutated; named ``payload``
        # rather than ``json`` to avoid shadowing the stdlib module
        payload = data.copy() if data else {}
        payload["external_id"] = external_id
        payload["provider"] = provider

        cr = ChangeRequest.query.filter(
            ChangeRequest.repository_id == repository.id,
            ChangeRequest.provider == provider,
            ChangeRequest.external_id == external_id,
        ).first()
        if cr:
            return client.put(
                "/repos/{}/change-requests/{}".format(
                    repository.get_full_name(), cr.number
                ),
                json=payload,
            )
        return client.post(
            "/repos/{}/change-requests".format(repository.get_full_name()),
            json=payload,
        )
def upsert_build(
    repository: Repository, provider: str, external_id: str, data: dict = None
) -> Response:
    """Create or update a build via the internal API.

    Mirrors :func:`upsert_change_request`: a redis lock serializes
    concurrent hook deliveries, then the build is PUT (if it already
    exists) or POSTed (if new).

    :param repository: repository the build belongs to
    :param provider: upstream provider name
    :param external_id: the provider's identifier for the build
    :param data: optional extra fields to send; never mutated
    :return: the internal API client's Response
    """
    lock_key = 'hook:build:{repo_id}:{provider}:{build_xid}'.format(
        repo_id=repository.id,
        provider=provider,
        build_xid=external_id,
    )
    with redis.lock(lock_key):
        # copy so the caller's dict is never mutated; named ``payload``
        # rather than ``json`` to avoid shadowing the stdlib module
        payload = data.copy() if data else {}
        payload['external_id'] = external_id
        payload['provider'] = provider

        # NOTE(review): unlike the change-request lookup, this does not
        # filter on repository_id — presumably (provider, external_id) is
        # globally unique for builds; confirm, otherwise a cross-repository
        # match is possible.
        build = Build.query.filter(
            Build.provider == provider,
            Build.external_id == external_id,
        ).first()
        if build:
            return client.put('/repos/{}/builds/{}'.format(
                repository.get_full_name(),
                build.number,
            ), json=payload)
        return client.post('/repos/{}/builds'.format(
            repository.get_full_name(),
        ), json=payload)
def identify_revision(repository: Repository, treeish: str): """ Attempt to transform a a commit-like reference into a valid revision. """ # try to find it from the database first if len(treeish) == 40: revision = Revision.query.filter( Revision.repository_id == repository.id, Revision.sha == treeish ).first() if revision: return revision try: vcs = repository.get_vcs() except UnknownRepositoryBackend: return None vcs.ensure(update_if_exists=False) lock_key = "sync_repo:{repo_id}".format(repo_id=repository.id) # lock this update to avoild piling up duplicate fetch/save calls with redis.lock(lock_key, expire=30): try: commit = next(vcs.log(parent=treeish, limit=1)) except UnknownRevision: vcs.update() commit = next(vcs.log(parent=treeish, limit=1)) revision, _ = commit.save(repository) return revision
def identify_revision(repository: Repository, treeish: str):
    """
    Resolve a commit-like reference to a known revision.

    Full 40-character shas are checked against the database first;
    anything else (or a miss) falls through to the repository's VCS,
    updating the local clone exactly once if the ref is unknown.
    Returns ``None`` when no VCS backend is configured.
    """
    # fast path: a full sha may already be recorded
    if len(treeish) == 40:
        existing = Revision.query.filter(
            Revision.repository_id == repository.id,
            Revision.sha == treeish,
        ).first()
        if existing:
            return existing

    vcs = repository.get_vcs()
    if not vcs:
        return

    vcs.ensure(update_if_exists=False)

    try:
        commit = next(vcs.log(parent=treeish, limit=1))
    except UnknownRevision:
        # unknown locally — refresh the clone and try exactly once more
        vcs.update()
        commit = next(vcs.log(parent=treeish, limit=1))

    saved_revision, _ = commit.save(repository)
    return saved_revision
def get(self, repo: Repository):
    """
    Return a tree of testcases for the given repository.

    Finds the most recent finished, passing build without a patch
    attached (i.e. on the main series) and proxies its
    file-coverage-tree from the internal API, attaching the serialized
    build to the response payload.
    """
    latest_build = Build.query.join(
        Source, Source.id == Build.source_id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Build.repository_id == repo.id,
        Build.result == Result.passed,
        Build.status == Status.finished,
    ).order_by(Build.date_created.desc(),
    ).first()

    if not latest_build:
        current_app.logger.info(
            'no successful builds found for repository')
        return self.respond({'entries': [], 'trail': []})

    path = '/repos/{}/builds/{}/file-coverage-tree'.format(
        repo.get_full_name(), latest_build.number,
    )
    # forward the incoming request (query args and all) to the internal API
    response = api_client.get(path, request=request)
    data = json.loads(response.data)
    # surface which build the coverage tree was computed from
    data['build'] = build_schema.dump(latest_build).data
    return self.respond(data)
def fetch_revisions(self, repo: Repository):
    """
    Fetch recent revisions for ``repo``.

    With the ``MOCK_REVISIONS`` config flag set, reads straight from the
    database. Otherwise the repository's VCS log is consulted, and any
    commit already persisted as a Revision row (authors preloaded) is
    substituted for the raw VCS commit.
    """
    if current_app.config.get('MOCK_REVISIONS'):
        return (
            Revision.query
            .filter(Revision.repository_id == repo.id)
            .order_by(Revision.date_created.desc())
            .all()
        )

    vcs = repo.get_vcs()
    if not vcs:
        return []

    vcs.ensure(update_if_exists=True)

    page_size = min(int(request.args.get('per_page', 50)), 50)
    commits = list(vcs.log(
        limit=page_size,
        parent=request.args.get('parent'),
        branch=request.args.get('branch', vcs.get_default_branch()),
    ))
    if not commits:
        return []

    # prefer persisted rows over raw VCS commits where they exist
    known = Revision.query.options(joinedload('author')).filter(
        Revision.repository_id == repo.id,
        Revision.sha.in_(c.sha for c in commits),
    )
    by_sha = {rev.sha: rev for rev in known}
    return [by_sha.get(commit.sha, commit) for commit in commits]
def make_instance(self, data):
    """Build or update a Repository from deserialized ``data``.

    When a repository is supplied in the schema context it is updated in
    place — only attributes whose values actually differ are written —
    otherwise a brand new Repository is constructed from ``data``.
    """
    instance = self.context.get("repository")
    if instance:
        for field, new_value in data.items():
            if getattr(instance, field) != new_value:
                setattr(instance, field, new_value)
        return instance
    return Repository(**data)
def add(repository, url, backend, active):
    """
    Register a repository given as ``provider/owner/name``.

    NOTE(review): the leading ``raise NotImplementedError`` disables this
    command — everything below it is unreachable, presumably kept as a
    reference implementation until the command is finished; confirm
    before re-enabling or deleting.
    """
    raise NotImplementedError
    # unreachable below this point (see note above)
    provider, owner_name, repo_name = repository.split("/", 2)
    repo = Repository(
        url=url,
        owner_name=slugify(owner_name),
        provider=RepositoryProvider(provider),
        name=slugify(repo_name),
        backend=getattr(RepositoryBackend, backend),
        status=RepositoryStatus.active if active else RepositoryStatus.inactive,
    )
    db.session.add(repo)
    db.session.commit()
def fetch_revisions(self, repo: Repository, page: int, parent: str = None) -> Tuple[list, bool]:
    """
    Return ``(revisions, has_more)`` for one page of repository history.

    One extra row beyond ``per_page`` is fetched so ``has_more`` can be
    derived without a count query. With ``MOCK_REVISIONS`` set the page
    comes straight from the database; otherwise the VCS log is read,
    persisted Revision rows (authors preloaded) replace raw commits, and
    commits with no database row are stamped with the repository id.
    """
    per_page = min(int(request.args.get("per_page", 50)), 50)
    offset = (page - 1) * per_page

    if current_app.config.get("MOCK_REVISIONS"):
        rows = (
            Revision.query
            .filter(Revision.repository_id == repo.id)
            .order_by(Revision.date_created.desc())
            .offset(offset)
            .limit(per_page + 1)
            .all()
        )
        return rows[:per_page], len(rows) > per_page

    try:
        vcs = repo.get_vcs()
    except UnknownRepositoryBackend:
        return [], False

    branch = request.args.get("branch")
    # only fall back to the default branch when neither a parent ref nor
    # an explicit branch was requested
    if not parent and branch is None:
        branch = vcs.get_default_branch()

    commits = list(
        vcs.log(
            limit=per_page + 1,
            offset=offset,
            parent=parent,
            branch=branch,
        )
    )
    if not commits:
        return [], False

    has_more = len(commits) > per_page
    commits = commits[:per_page]

    persisted = Revision.query.options(joinedload("author")).filter(
        Revision.repository_id == repo.id,
        Revision.sha.in_(c.sha for c in commits),
    )
    by_sha = {rev.sha: rev for rev in persisted}

    page_items = []
    for commit in commits:
        match = by_sha.get(commit.sha)
        if match is None:
            # raw VCS commit — tag it so serialization knows its repository
            commit.repository_id = repo.id
            match = commit
        page_items.append(match)
    return page_items, has_more
def add(repository_url, backend, active):
    """Create a native repository and, when active, import it in process."""
    status = RepositoryStatus.active if active else RepositoryStatus.inactive
    repo = Repository(
        url=repository_url,
        backend=getattr(RepositoryBackend, backend),
        status=status,
        provider=RepositoryProvider.native,
    )
    db.session.add(repo)
    db.session.commit()

    if active:
        # do initial import in process
        import_repo(repo_id=repo.id)
def delete(self, repo: Repository):
    """
    Deactivate a repository.

    Idempotent: an already-inactive repository is acknowledged with 202
    without scheduling another deletion task.
    """
    if repo.status != RepositoryStatus.inactive:
        with db.session.begin_nested():
            repo.status = RepositoryStatus.inactive
            db.session.add(repo)
            db.session.flush()
        # actual data removal happens asynchronously
        delete_repo.delay(repo_id=repo.id)
    return self.respond(status=202)
def add(repository_full_name, url, backend, active):
    """
    Register a repository given as ``owner/name``.

    NOTE(review): the leading ``raise NotImplementedError`` disables this
    command — everything below it is unreachable, presumably kept as a
    reference implementation until the command is finished; confirm
    before re-enabling or deleting.
    """
    raise NotImplementedError
    # unreachable below this point (see note above)
    owner_name, repo_name = repository_full_name.split('/', 1)
    repo = Repository(
        url=url,
        owner_name=slugify(owner_name),
        name=slugify(repo_name),
        backend=getattr(RepositoryBackend, backend),
        status=RepositoryStatus.active if active else RepositoryStatus.inactive,
    )
    db.session.add(repo)
    db.session.commit()
    if active:
        # do initial import in process
        import_repo(repo_id=repo.id)
def get(self): """ Return a list of builds. """ # tenants automatically restrict this query but we dont want # to include public repos tenant = auth.get_current_tenant() if not tenant.repository_ids: return self.respond([]) query = ( Build.query.options( joinedload("repository"), joinedload("revision"), subqueryload_all("revision.authors"), subqueryload_all("stats"), subqueryload_all("authors"), ) .filter(Build.repository_id.in_(tenant.repository_ids)) .order_by(Build.date_created.desc()) ) user = request.args.get("user") if user: if user == "me": user = auth.get_current_user() else: user = User.query.get(user) if not user: return self.respond([]) query = query.filter( Build.authors.any( Author.email.in_( db.session.query(Email.email).filter( Email.user_id == user.id, Email.verified == True # NOQA ) ) ) ) repository = request.args.get("repository") if repository: repo = Repository.from_full_name(repository) if not repo: return self.respond([]) query = query.filter(Build.repository_id == repo.id) return self.paginate_with_schema(builds_schema, query)
def get_revisions(repo: Repository, branch: str = None, limit: int = 200) -> List[str]:
    """
    Return up to ``limit`` revision shas for ``repo``.

    With ``MOCK_REVISIONS`` set, shas come from persisted Sources;
    otherwise they are read from the VCS log for ``branch`` (defaulting
    to the repository's default branch). Returns an empty list when the
    repository has no usable VCS backend.
    """
    if current_app.config.get("MOCK_REVISIONS"):
        rows = (db.session.query(Source.revision_sha).filter(
            Source.repository_id == repo.id).order_by(
                Source.date_created.desc()).limit(limit).all())
        # BUG FIX: .all() on a single-column query yields Row tuples, not
        # strings — unwrap so this branch matches the declared List[str]
        # return type, like the VCS branch below.
        return [sha for sha, in rows]
    try:
        vcs = repo.get_vcs()
    except UnknownRepositoryBackend:
        return []
    if branch is None:
        branch = vcs.get_default_branch()
    return [r.sha for r in vcs.log(limit=limit, branch=branch)]
def get(self, repo: Repository):
    """
    Return a list of known branches for the given repository.

    Branch names are cached in redis (``cache_key`` / ``cache_expire``)
    to avoid hitting the VCS on every request.
    """
    cache_key = self.cache_key.format(repo_id=repo.id.hex)

    result = redis.get(cache_key)
    if result is None:
        vcs = repo.get_vcs()
        if not vcs:
            return self.respond([])

        vcs.ensure()
        result = vcs.get_known_branches()
        # NOTE(review): arguments here are (key, value, expire), which is
        # not redis-py's setex(name, time, value) order — presumably this
        # ``redis`` is a project wrapper; confirm the signature.
        redis.setex(cache_key, json.dumps(result), self.cache_expire)
    else:
        result = json.loads(result)

    return self.respond([{"name": r} for r in result])
def get(self):
    """
    Return a list of change requests.

    Results are limited to the current tenant's repositories and can be
    narrowed by ``?user=`` (a user id, or ``me``) and ``?repository=``
    (a full repository name).
    """
    tenant = auth.get_current_tenant()
    if not tenant.repository_ids:
        return self.respond([])

    query = (ChangeRequest.query.options(
        # eager-load relations the serializer touches to avoid N+1s
        joinedload("head_revision"),
        joinedload("parent_revision", innerjoin=True),
        joinedload("author"),
    ).filter(ChangeRequest.repository_id.in_(
        tenant.repository_ids)).order_by(
            ChangeRequest.date_created.desc()))

    user = request.args.get("user")
    if user:
        if user == "me":
            user = auth.get_current_user()
        else:
            user = User.query.get(user)
        if not user:
            # unknown user id -> empty result rather than an error
            return self.respond([])
        # match change requests authored under any of the user's
        # verified emails
        query = query.filter(
            ChangeRequest.author_id.in_(
                db.session.query(Author.id).filter(
                    Author.email.in_(
                        db.session.query(Email.email).filter(
                            Email.user_id == user.id,
                            Email.verified == True  # NOQA
                        )))))

    repository = request.args.get("repository")
    if repository:
        repo = Repository.from_full_name(repository)
        if not repo:
            return self.respond([])
        query = query.filter(ChangeRequest.repository_id == repo.id)

    schema = ChangeRequestWithBuildSchema(many=True, strict=True)
    return self.paginate_with_schema(schema, query)
def load_revisions(repo: models.Repository, num_passes=100) -> models.Revision:
    """
    Walk the repository log and persist every revision.

    Pages through ``vcs.log`` by repeatedly restarting from the last seen
    sha, committing after each saved revision. Stops after ``num_passes``
    passes, or earlier when a pass re-reads the cursor sha (no progress).
    Returns the first (most recent) revision saved, or ``None`` if the
    log was empty.
    """
    vcs = repo.get_vcs()
    vcs.ensure()

    num = 0
    has_more = True
    parent = None  # cursor: last sha processed in the previous pass
    first_revision = None
    while has_more and num < num_passes:
        has_more = False
        for commit in vcs.log(parent=parent):
            revision, created = commit.save(repo)
            if first_revision is None:
                first_revision = revision
            db.session.commit()
            # presumably vcs.log(parent=X) re-yields X itself; seeing the
            # cursor again means this pass made no progress — TODO confirm
            if parent == commit.sha:
                break
            parent = commit.sha
            has_more = True
        num += 1
    return first_revision
def fetch_revisions(
    self, repo: Repository, page: int, parent: str = None
) -> Tuple[list, bool]:
    """
    Return ``(revisions, has_more)`` for one page of repository history.

    One extra entry beyond ``per_page`` is fetched from the VCS so
    ``has_more`` can be derived without a count. Persisted Revision rows
    (authors preloaded) replace raw VCS commits where they exist.
    """
    if current_app.config.get("MOCK_REVISIONS"):
        # BUG FIX: previously returned a bare list here, breaking callers
        # that unpack the declared (results, has_more) tuple
        results = Revision.query.filter(Revision.repository_id == repo.id).order_by(
            Revision.date_created.desc()
        ).all()
        return results, False

    vcs = repo.get_vcs()
    if not vcs:
        # BUG FIX: was ``return []`` — must match the declared tuple shape
        return [], False

    per_page = min(int(request.args.get("per_page", 50)), 50)
    branch = request.args.get("branch")
    # only fall back to the default branch when neither a parent ref nor
    # an explicit branch was requested
    if not parent and branch is None:
        branch = vcs.get_default_branch()

    vcs_log = list(
        vcs.log(
            limit=per_page + 1,
            offset=(page - 1) * per_page,
            parent=parent,
            branch=branch,
        )
    )
    if not vcs_log:
        return [], False

    has_more = len(vcs_log) > per_page
    vcs_log = vcs_log[:per_page]

    existing = Revision.query.options(joinedload("author")).filter(
        Revision.repository_id == repo.id, Revision.sha.in_(c.sha for c in vcs_log)
    )
    revisions_map = {r.sha: r for r in existing}

    return [revisions_map.get(item.sha, item) for item in vcs_log], has_more
def post(self):
    """
    Activate a GitHub repository.

    Looks the repository up with the requesting user's credentials,
    requires admin permission on it, then creates (or re-activates) the
    Repository row under a redis lock, provisioning a deploy key the
    first time it becomes active. Finally records the user's access.
    """
    repo_name = (request.get_json() or {}).get("name")
    if not repo_name:
        return self.error("missing repo_name parameter")

    owner_name, repo_name = repo_name.split("/", 1)

    user = auth.get_current_user()
    provider = GitHubRepositoryProvider(cache=False)
    try:
        repo_data = provider.get_repo(
            user=user, owner_name=owner_name, repo_name=repo_name)
    except IdentityNeedsUpgrade as exc:
        # the user's OAuth token lacks the scopes needed to read this repo
        return self.respond(
            {
                "provider": "github",
                "error": "identity_needs_upgrade",
                "url": exc.get_upgrade_url(),
            },
            401,
        )

    if Permission.admin not in repo_data["permission"]:
        return self.respond(
            {"message": "Insufficient permissions to activate repository"},
            403)

    # serialize concurrent activations of the same repository
    lock_key = Repository.get_lock_key(RepositoryProvider.github,
                                       owner_name, repo_name)
    with redis.lock(lock_key):
        try:
            with db.session.begin_nested():
                # bind various github specific attributes
                repo = Repository(
                    backend=RepositoryBackend.git,
                    provider=RepositoryProvider.github,
                    status=RepositoryStatus.active,
                    external_id=str(repo_data["id"]),
                    owner_name=owner_name,
                    name=repo_name,
                    url=repo_data["url"],
                    data=repo_data["config"],
                )
                db.session.add(repo)
                db.session.flush()
        except IntegrityError:
            # row already exists — fetch it without tenant scoping
            repo = (Repository.query.unrestricted_unsafe().filter(
                Repository.provider == RepositoryProvider.github,
                Repository.external_id == str(repo_data["id"]),
            ).first())
            # it's possible to get here if the "full name" already exists
            assert repo
            needs_configured = repo.status == RepositoryStatus.inactive
            if needs_configured:
                repo.status = RepositoryStatus.active
                db.session.add(repo)
        else:
            # freshly created row always needs a deploy key
            needs_configured = True

        if needs_configured:
            # generate a new private key for use on github
            key = ssh.generate_key()
            db.session.add(
                ItemOption(item_id=repo.id,
                           name="auth.private-key",
                           value=key.private_key))

            # register key with github
            # TODO(dcramer): we should store this key reference so we can delete it
            # when the user deactivates the repo
            provider.add_key(user=user,
                             repo_name=repo_name,
                             owner_name=owner_name,
                             key=key)

        db.session.commit()

    # record the requesting user's access; tolerate a pre-existing row
    try:
        with db.session.begin_nested():
            db.session.add(
                RepositoryAccess(
                    repository_id=repo.id,
                    user_id=user.id,
                    permission=repo_data["permission"],
                ))
            db.session.flush()
    except IntegrityError:
        pass

    db.session.commit()

    return self.respond_with_schema(repo_schema, repo, 201)
def post(self):
    """
    Activate a GitHub repository.

    Fetches the repository with the user's own GitHub credentials,
    creating the Repository row (plus a deploy key and an initial import
    task) on first activation, then records the user's access.

    NOTE(review): unlike the newer variants of this endpoint, there is no
    redis lock here — concurrent activations of the same repository could
    race; confirm whether that matters for this code path.
    """
    repo_name = (request.get_json() or {}).get('name')
    if not repo_name:
        return self.error('missing repo_name parameter')

    user = auth.get_current_user()
    try:
        github, _ = self.get_github_client(user)
    except IdentityNeedsUpgrade as exc:
        # the user's OAuth token lacks the scopes needed for this call
        return self.respond(
            {
                'error': 'identity_needs_upgrade',
                'url': exc.get_upgrade_url(),
            }, 401)

    # fetch repository details using their credentials
    repo_data = github.get('/repos/{}'.format(repo_name))
    owner_name, repo_name = repo_data['full_name'].split('/', 1)

    repo = Repository.query.filter(
        Repository.provider == RepositoryProvider.github,
        Repository.external_id == str(repo_data['id']),
    ).first()
    if repo is None:
        # bind various github specific attributes
        repo = Repository(
            backend=RepositoryBackend.git,
            provider=RepositoryProvider.github,
            status=RepositoryStatus.active,
            external_id=str(repo_data['id']),
            owner_name=owner_name,
            name=repo_name,
            url=repo_data['clone_url'],
            data={'github': {
                'full_name': repo_data['full_name']
            }},
        )
        db.session.add(repo)

        # generate a new private key for use on github
        key = ssh.generate_key()
        db.session.add(
            ItemOption(
                item_id=repo.id,
                name='auth.private-key',
                value=key.private_key,
            ))

        # register key with github
        github.post('/repos/{}/keys'.format(
            repo.data['github']['full_name']),
            json={
                'title': 'zeus',
                'key': key.public_key,
                'read_only': True,
            })

        # we need to commit before firing off the task
        db.session.commit()

        import_repo.delay(repo_id=repo.id)

    # record the requesting user's access; tolerate a pre-existing row
    try:
        with db.session.begin_nested():
            db.session.add(
                RepositoryAccess(
                    repository_id=repo.id,
                    user_id=user.id,
                ))
            db.session.flush()
    except IntegrityError:
        pass

    db.session.commit()

    return self.respond_with_schema(repo_schema, repo, 201)
def post(self, org: Organization):
    """
    Create a new repository.

    Supports ``provider: github`` (looked up via the user's GitHub
    credentials, with deploy-key provisioning) and ``provider: native``
    (created straight from the submitted schema fields). Newly created
    repositories get an async initial import.
    """
    provider = (request.get_json() or {}).get('provider', 'native')
    if provider == 'github':
        schema = github_repo_schema
    elif provider == 'native':
        schema = repo_schema
    else:
        raise NotImplementedError

    result = self.schema_from_request(schema, partial=True)
    if result.errors:
        return self.respond(result.errors, 403)
    data = result.data

    if provider == 'github':
        # get their credentials
        identity = Identity.query.filter(
            Identity.provider == 'github',
            Identity.user_id == auth.get_current_user().id
        ).first()
        if 'repo' not in identity.config['scopes']:
            return self.respond(
                {
                    'needUpgrade': True,
                    'upgradeUrl': '/auth/github/upgrade'
                }, 401
            )
        # NOTE(review): this assert is dead — ``identity`` was already
        # dereferenced above, so a missing identity raises before here.
        assert identity

        # fetch repository details using their credentials
        github = GitHubClient(token=identity.config['access_token'])
        repo_data = github.get('/repos/{}'.format(data['github_name']))

        repo, created = Repository.query.filter(
            Repository.provider == RepositoryProvider.github,
            Repository.external_id == str(repo_data['id']),
        ).first(), False
        if repo is None:
            # bind various github specific attributes
            repo, created = Repository(
                organization=org,
                backend=RepositoryBackend.git,
                provider=RepositoryProvider.github,
                status=RepositoryStatus.active,
                external_id=str(repo_data['id']),
                url=repo_data['clone_url'],
                data={'github': {
                    'full_name': repo_data['full_name']
                }},
            ), True
            db.session.add(repo)

            # generate a new private key for use on github
            key = ssh.generate_key()
            db.session.add(
                ItemOption(
                    item_id=repo.id,
                    name='auth.private-key',
                    value=key.private_key,
                )
            )

            # register key with github
            github.post(
                '/repos/{}/keys'.format(repo.data['github']['full_name']),
                json={
                    'title': 'zeus',
                    'key': key.public_key,
                    'read_only': True,
                }
            )
    elif provider == 'native':
        repo, created = Repository(
            organization=org,
            status=RepositoryStatus.active,
            **data,
        ), True
        db.session.add(repo)

    db.session.flush()

    try:
        with db.session.begin_nested():
            db.session.add(
                RepositoryAccess(
                    organization=org,
                    repository=repo,
                    user=auth.get_current_user(),
                )
            )
            db.session.flush()
    except IntegrityError:
        # NOTE(review): this ``raise`` re-raises the IntegrityError,
        # defeating the handler — sibling endpoints swallow it with
        # ``pass`` (access row already exists). The trailing ``pass`` is
        # unreachable; likely a debug leftover — confirm intent.
        raise
        pass

    db.session.commit()

    if created:
        import_repo.delay(repo_id=repo.id)

    return self.respond_with_schema(repo_schema, repo)
def post(self):
    """
    Activate a GitHub repository.

    Looks the repository up with the requesting user's credentials,
    requires admin access, then creates (or re-activates) the Repository
    row under a redis lock, provisioning a deploy key the first time it
    becomes active. An import task is queued and the user's access is
    recorded.
    """
    repo_name = (request.get_json() or {}).get('name')
    if not repo_name:
        return self.error('missing repo_name parameter')

    owner_name, repo_name = repo_name.split('/', 1)

    user = auth.get_current_user()
    provider = GitHubRepositoryProvider(cache=False)
    try:
        repo_data = provider.get_repo(
            user=user, owner_name=owner_name, repo_name=repo_name)
    except IdentityNeedsUpgrade as exc:
        # the user's OAuth token lacks the scopes needed to read this repo
        return self.respond(
            {
                'provider': 'github',
                'error': 'identity_needs_upgrade',
                'url': exc.get_upgrade_url(),
            }, 401)

    if not repo_data['admin']:
        return self.respond(
            {
                'message': 'Insufficient permissions to activate repository',
            }, 403)

    # serialize concurrent activations of the same repository
    lock_key = 'repo:{provider}/{owner_name}/{repo_name}'.format(
        provider='github',
        owner_name=owner_name,
        repo_name=repo_name,
    )
    with redis.lock(lock_key):
        try:
            with db.session.begin_nested():
                # bind various github specific attributes
                repo = Repository(
                    backend=RepositoryBackend.git,
                    provider=RepositoryProvider.github,
                    status=RepositoryStatus.active,
                    external_id=str(repo_data['id']),
                    owner_name=owner_name,
                    name=repo_name,
                    url=repo_data['url'],
                    data=repo_data['config'],
                )
                db.session.add(repo)
                db.session.flush()
        except IntegrityError:
            # row already exists — fetch it without tenant scoping
            repo = Repository.query.unrestricted_unsafe().filter(
                Repository.provider == RepositoryProvider.github,
                Repository.external_id == str(repo_data['id']),
            ).first()
            # it's possible to get here if the "full name" already exists
            assert repo
            needs_configured = repo.status == RepositoryStatus.inactive
            if needs_configured:
                repo.status = RepositoryStatus.active
                db.session.add(repo)
        else:
            # freshly created row always needs a deploy key
            needs_configured = True

        if needs_configured:
            # generate a new private key for use on github
            key = ssh.generate_key()
            db.session.add(
                ItemOption(
                    item_id=repo.id,
                    name='auth.private-key',
                    value=key.private_key,
                ))

            # register key with github
            provider.add_key(
                user=user,
                repo_name=repo_name,
                owner_name=owner_name,
                key=key,
            )

        # we need to commit before firing off the task
        db.session.commit()

    import_repo.delay(repo_id=repo.id)

    # record the requesting user's access; tolerate a pre-existing row
    try:
        with db.session.begin_nested():
            db.session.add(
                RepositoryAccess(
                    repository_id=repo.id,
                    user_id=user.id,
                ))
            db.session.flush()
    except IntegrityError:
        pass

    db.session.commit()

    return self.respond_with_schema(repo_schema, repo, 201)