def fetch_revisions(self, repo: Repository):
    if current_app.config.get('MOCK_REVISIONS'):
        return Revision.query \
            .filter(Revision.repository_id == repo.id) \
            .order_by(Revision.date_created.desc()) \
            .all()

    vcs = repo.get_vcs()
    if not vcs:
        return []

    vcs.ensure(update_if_exists=True)

    branch = request.args.get('branch', vcs.get_default_branch())
    parent = request.args.get('parent')

    vcs_log = list(vcs.log(
        limit=min(int(request.args.get('per_page', 50)), 50),
        parent=parent,
        branch=branch,
    ))
    if not vcs_log:
        return []

    existing = Revision.query \
        .options(joinedload('author')) \
        .filter(
            Revision.repository_id == repo.id,
            Revision.sha.in_(c.sha for c in vcs_log)
        )

    revisions_map = {r.sha: r for r in existing}

    return [revisions_map.get(item.sha, item) for item in vcs_log]
def identify_revision(repository: Repository, treeish: str):
    """
    Attempt to transform a commit-like reference into a valid revision.
    """
    # try to find it from the database first
    if len(treeish) == 40:
        revision = Revision.query.filter(
            Revision.repository_id == repository.id,
            Revision.sha == treeish).first()
        if revision:
            return revision

    vcs = repository.get_vcs()
    if not vcs:
        return

    vcs.ensure(update_if_exists=False)

    try:
        commit = next(vcs.log(parent=treeish, limit=1))
    except UnknownRevision:
        vcs.update()
        commit = next(vcs.log(parent=treeish, limit=1))

    revision, _ = commit.save(repository)

    return revision
def identify_revision(repository: Repository, treeish: str):
    """
    Attempt to transform a commit-like reference into a valid revision.
    """
    # try to find it from the database first
    if len(treeish) == 40:
        revision = Revision.query.filter(
            Revision.repository_id == repository.id,
            Revision.sha == treeish
        ).first()
        if revision:
            return revision

    try:
        vcs = repository.get_vcs()
    except UnknownRepositoryBackend:
        return None

    vcs.ensure(update_if_exists=False)

    lock_key = "sync_repo:{repo_id}".format(repo_id=repository.id)
    # lock this update to avoid piling up duplicate fetch/save calls
    with redis.lock(lock_key, expire=30):
        try:
            commit = next(vcs.log(parent=treeish, limit=1))
        except UnknownRevision:
            vcs.update()
            commit = next(vcs.log(parent=treeish, limit=1))

        revision, _ = commit.save(repository)

    return revision
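# Hedged usage sketch, not part of the source: one way identify_revision might be
# called to resolve a user-supplied ref. The "ref" query parameter, the "HEAD"
# default, and the ValueError are illustrative assumptions; if the ref cannot be
# resolved even after vcs.update(), UnknownRevision propagates to the caller.
def resolve_ref(repository: Repository):
    treeish = request.args.get("ref", "HEAD")
    revision = identify_revision(repository, treeish)
    if revision is None:
        # identify_revision returns None when the repository has no usable VCS backend
        raise ValueError("repository has no VCS backend")
    return revision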
def fetch_revisions(self, repo: Repository, page: int, parent: str = None) -> Tuple[list, bool]:
    per_page = min(int(request.args.get("per_page", 50)), 50)

    if current_app.config.get("MOCK_REVISIONS"):
        results = (
            Revision.query.filter(Revision.repository_id == repo.id)
            .order_by(Revision.date_created.desc())
            .offset((page - 1) * per_page)
            .limit(per_page + 1)
            .all()
        )
        has_more = len(results) > per_page
        return results[:per_page], has_more

    try:
        vcs = repo.get_vcs()
    except UnknownRepositoryBackend:
        return [], False

    branch = request.args.get("branch")
    if not parent and branch is None:
        branch = vcs.get_default_branch()

    vcs_log = list(
        vcs.log(
            limit=per_page + 1,
            offset=(page - 1) * per_page,
            parent=parent,
            branch=branch,
        )
    )
    if not vcs_log:
        return [], False

    has_more = len(vcs_log) > per_page
    vcs_log = vcs_log[:per_page]

    existing = Revision.query.options(joinedload("author")).filter(
        Revision.repository_id == repo.id,
        Revision.sha.in_(c.sha for c in vcs_log),
    )

    revisions_map = {r.sha: r for r in existing}

    results = []
    for item in vcs_log:
        try:
            results.append(revisions_map[item.sha])
        except KeyError:
            item.repository_id = repo.id
            results.append(item)

    return results, has_more
def get_revisions(repo: Repository, branch: str = None, limit: int = 200) -> List[str]:
    if current_app.config.get("MOCK_REVISIONS"):
        return (
            db.session.query(Source.revision_sha)
            .filter(Source.repository_id == repo.id)
            .order_by(Source.date_created.desc())
            .limit(limit)
            .all()
        )

    try:
        vcs = repo.get_vcs()
    except UnknownRepositoryBackend:
        return []

    if branch is None:
        branch = vcs.get_default_branch()

    return [r.sha for r in vcs.log(limit=limit, branch=branch)]
def get(self, repo: Repository):
    """
    Return a list of known branches for the given repository.
    """
    cache_key = self.cache_key.format(repo_id=repo.id.hex)

    result = redis.get(cache_key)
    if result is None:
        vcs = repo.get_vcs()
        if not vcs:
            return self.respond([])

        vcs.ensure()
        result = vcs.get_known_branches()
        redis.setex(cache_key, json.dumps(result), self.cache_expire)
    else:
        result = json.loads(result)

    return self.respond([{"name": r} for r in result])
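# Hedged sketch, not part of the source: the get() handler above reads
# self.cache_key and self.cache_expire, so the enclosing resource presumably
# defines them roughly like this. The class name, base class, and concrete
# values are assumptions for illustration only.
class RepositoryBranchesResource(BaseRepositoryResource):
    cache_key = "branches:{repo_id}"
    cache_expire = 60  # seconds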
def load_revisions(repo: models.Repository, num_passes=100) -> models.Revision:
    vcs = repo.get_vcs()
    vcs.ensure()

    num = 0
    has_more = True
    parent = None
    first_revision = None
    while has_more and num < num_passes:
        has_more = False
        for commit in vcs.log(parent=parent):
            revision, created = commit.save(repo)
            if first_revision is None:
                first_revision = revision
            db.session.commit()
            if parent == commit.sha:
                break

            parent = commit.sha
            has_more = True
        num += 1
    return first_revision
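# Hedged usage sketch, not part of the source: load_revisions could be used to
# backfill a repository's history from a one-off script. The backfill name and
# the print-based reporting are assumptions for illustration.
def backfill(repo: models.Repository):
    first_revision = load_revisions(repo)
    if first_revision is None:
        print("no revisions found for repository {}".format(repo.id))
    else:
        print("loaded revisions starting at {}".format(first_revision.sha))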
def fetch_revisions(
    self, repo: Repository, page: int, parent: str = None
) -> Tuple[list, bool]:
    if current_app.config.get("MOCK_REVISIONS"):
        results = Revision.query.filter(Revision.repository_id == repo.id).order_by(
            Revision.date_created.desc()
        ).all()
        return results, False

    vcs = repo.get_vcs()
    if not vcs:
        return [], False

    per_page = min(int(request.args.get("per_page", 50)), 50)
    branch = request.args.get("branch")
    if not parent and branch is None:
        branch = vcs.get_default_branch()

    vcs_log = list(
        vcs.log(
            limit=per_page + 1,
            offset=(page - 1) * per_page,
            parent=parent,
            branch=branch,
        )
    )
    if not vcs_log:
        return [], False

    has_more = len(vcs_log) > per_page
    vcs_log = vcs_log[:per_page]

    existing = Revision.query.options(joinedload("author")).filter(
        Revision.repository_id == repo.id, Revision.sha.in_(c.sha for c in vcs_log)
    )

    revisions_map = {r.sha: r for r in existing}

    return [revisions_map.get(item.sha, item) for item in vcs_log], has_more
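# Hedged usage sketch, not part of the source: how a caller might page through
# either paginated variant of fetch_revisions above using the (results, has_more)
# tuple. The resource object and max_pages cap are assumptions, and a Flask
# request context is required because fetch_revisions reads request.args.
def collect_revisions(resource, repo: Repository, max_pages: int = 10) -> list:
    collected = []
    page = 1
    has_more = True
    while has_more and page <= max_pages:
        results, has_more = resource.fetch_revisions(repo, page=page)
        collected.extend(results)
        page += 1
    return collected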