Exemplo n.º 1
0
def _process_changeset_view(self, request):
    """Redirect changeset views to GitHub when the repository lives there.

    Builds a ``compare/`` URL when both an old and a new revision are
    given, or a ``commit/`` URL for a single revision, and redirects to
    it.  Falls back to the stock Trac changeset view via
    ``_old_process_changeset_view`` otherwise.

    :param request: the incoming Trac request object.
    """
    request.perm.require('CHANGESET_VIEW')

    new = request.args.get('new')
    new_path = request.args.get('new_path')
    old = request.args.get('old')
    repository_name = request.args.get('reponame')

    # -- support for the revision log ''View changes'' form,
    #    where we need to give the path and revision at the same time
    if old and '@' in old:
        # Only the revision part of ``old`` is used below; the path
        # component was previously unpacked into an unused variable.
        old = old.split('@', 1)[0]
    if new and '@' in new:
        new, new_path = new.split('@', 1)

    manager = RepositoryManager(self.env)

    if repository_name:
        repository = manager.get_repository(repository_name)
    else:
        # No explicit repository: resolve it from the path.
        repository_name, repository, new_path = \
            manager.get_repository_by_path(new_path)

    repository_url = repository.params.get('url', '')

    if _valid_github_request(request) and \
            re.match(r'^https?://(?:www\.)?github\.com/', repository_url):
        url = repository_url.rstrip('/') + '/'

        if old:
            url += 'compare/' + old + '...' + new
        else:
            url += 'commit/' + new

        request.redirect(url)
    else:
        return _old_process_changeset_view(self, request)
Exemplo n.º 2
0
 def _sync(self, reponame, rev, clean):
     """Resynchronize the revision cache for one repository or all of them.

     :param reponame: repository name, or ``'*'`` to resync every real
                      repository known to the manager.
     :param rev: a single revision to resync, or ``None`` for a full sync.
     :param clean: forwarded to ``Repository.sync`` to force a rebuild.
     :raises TracError: for an unknown repository, or when a single
                        revision is requested together with ``'*'``.
     """
     rm = RepositoryManager(self.env)
     if reponame == '*':
         # A single revision cannot be applied across many repositories.
         if rev is not None:
             raise TracError(_('Cannot synchronize a single revision '
                               'on multiple repositories'))
         repositories = rm.get_real_repositories()
     else:
         # The default repository is stored under the empty name.
         if is_default(reponame):
             reponame = ''
         repos = rm.get_repository(reponame)
         if repos is None:
             raise TracError(_("Repository '%(repo)s' not found",
                               repo=reponame or '(default)'))
         if rev is not None:
             # Single-revision resync: report and stop here.
             repos.sync_changeset(rev)
             printout(_('%(rev)s resynced on %(reponame)s.', rev=rev,
                        reponame=repos.reponame or '(default)'))
             return
         repositories = [repos]

     # NOTE(review): get_db_cnx() is the legacy Trac database API; a
     # sibling implementation in this file uses env.db_query instead.
     db = self.env.get_db_cnx()
     for repos in sorted(repositories, key=lambda r: r.reponame):
         printout(_('Resyncing repository history for %(reponame)s... ',
                    reponame=repos.reponame or '(default)'))
         repos.sync(self._sync_feedback, clean=clean)
         cursor = db.cursor()
         # Report how many revisions ended up in the cache table.
         cursor.execute("SELECT count(rev) FROM revision WHERE repos=%s",
                        (repos.id,))
         for cnt, in cursor:
             printout(ngettext('%(num)s revision cached.',
                               '%(num)s revisions cached.', num=cnt))
     printout(_('Done.'))
Exemplo n.º 3
0
Arquivo: admin.py Projeto: t2y/trac
    def _sync(self, reponame, rev, clean):
        """Resynchronize the changeset cache for one or every repository.

        A ``reponame`` of ``'*'`` walks all real repositories; otherwise a
        single repository is synced, optionally restricted to one ``rev``.
        """
        manager = RepositoryManager(self.env)
        if reponame != '*':
            # The default repository is registered under the empty name.
            if is_default(reponame):
                reponame = ''
            repository = manager.get_repository(reponame)
            if repository is None:
                raise TracError(_("Repository '%(repo)s' not found",
                                  repo=reponame or '(default)'))
            if rev is not None:
                # Resync just the one changeset and report it.
                repository.sync_changeset(rev)
                printout(_('%(rev)s resynced on %(reponame)s.', rev=rev,
                           reponame=repository.reponame or '(default)'))
                return
            repositories = [repository]
        else:
            # A single revision across all repositories is ambiguous.
            if rev is not None:
                raise TracError(_('Cannot synchronize a single revision '
                                  'on multiple repositories'))
            repositories = manager.get_real_repositories()

        for repository in sorted(repositories, key=lambda r: r.reponame):
            printout(_('Resyncing repository history for %(reponame)s... ',
                       reponame=repository.reponame or '(default)'))
            repository.sync(self._sync_feedback, clean=clean)
            query = "SELECT count(rev) FROM revision WHERE repos=%s"
            for count, in self.env.db_query(query, (repository.id,)):
                printout(ngettext('%(num)s revision cached.',
                                  '%(num)s revisions cached.', num=count))
        printout(_('Done.'))
    def for_request(cls, env, req, create=False):
        """
        Return a **single** subscription for a HTTP request.

        Builds a subscription dict from the request's ``realm``, ``path``,
        ``rev`` and ``reponame`` arguments and delegates persistence to
        ``cls._from_dict``.

        :param env: the Trac environment.
        :param req: the HTTP request; ``req.authname`` becomes the user.
        :param create: forwarded to ``_from_dict``.
        """
        reponame = req.args.get('reponame')
        rm = RepositoryManager(env)
        # NOTE(review): get_repository() may return None for an unknown
        # reponame, which would break the attribute accesses below --
        # confirm that callers guarantee a valid repository.
        repos = rm.get_repository(reponame)

        path = req.args.get('path') or ''
        rev = req.args.get('rev') or repos.youngest_rev

        dict_ = {
            'user': req.authname,
            'type': req.args.get('realm'),
            'path': '',
            'rev': '',
            'repos': '',
        }

        # The realm selects which fields are meaningful; the cases are
        # mutually exclusive, so a single elif chain avoids re-testing.
        if dict_['type'] == 'attachment':
            dict_['path'] = path
        elif dict_['type'] == 'changeset':
            # Strip the leading '/' to recover the revision identifier.
            dict_['rev'] = path[1:]
            dict_['repos'] = repos.reponame
        elif dict_['type'] == 'browser':
            # An empty path means the repository root.
            dict_['path'] = path[1:] if path else '/'
            dict_['rev'] = rev
            dict_['repos'] = repos.reponame

        return cls._from_dict(env, dict_, create=create)
Exemplo n.º 5
0
    def for_request(cls, env, req, create=False):
        """
        Return a **single** subscription for a HTTP request.
        """
        args = req.args
        reponame = args.get('reponame')
        repos = RepositoryManager(env).get_repository(reponame)

        path = args.get('path') or ''
        rev = args.get('rev') or repos.youngest_rev

        realm = args.get('realm')
        # Start from an empty subscription record and fill in the fields
        # that the realm makes meaningful.
        dict_ = {
            'user': req.authname,
            'type': realm,
            'path': '',
            'rev': '',
            'repos': '',
        }

        if realm == 'attachment':
            dict_['path'] = path

        if realm == 'changeset':
            # The revision is the path with its leading '/' removed.
            dict_['rev'] = path[1:]
            dict_['repos'] = repos.reponame

        if realm == 'browser':
            dict_['path'] = '/' if not path else path[1:]
            dict_['rev'] = rev
            dict_['repos'] = repos.reponame

        return cls._from_dict(env, dict_, create=create)
Exemplo n.º 6
0
    def openProject(self, req, projectname, revision, extensionlist):
        """ Returns project's connection string and repository file list matched to search criteria.

        :param projectname: project to open; must be non-empty.
        :param revision: repository revision to list, or '' for the youngest.
        :param extensionlist: comma-separated extensions to keep, or '' for all.
        :returns: a list whose first item is the SCM URL, optionally followed
                  by a ``'versioncontrolfiles|...'`` entry; an empty list when
                  the project is unknown.
        :raises Exception: when the project name is blank.
        """
        if str(projectname).strip() == '':
            raise exceptions.Exception("Incorrect project name")

        # Empty strings mean "parameter not given".
        if revision.strip() == '':
            revision = None
        if extensionlist.strip() == '':
            extensionlist = None

        # Find node for the requested path/rev
        repomgr = RepositoryManager(self.env)
        repository = repomgr.get_repository(None)
        projectname = conf.cleanupProjectName(projectname)
        parts = []

        project = Project.get(env_name=projectname)
        if project:
            parts.append(self.get_scm_repository_url(project.env_name))
        else:
            # Unknown project: nothing to report.
            return []

        try:
            if revision:
                revision = repository.normalize_rev(revision)
            rev_or_latest = revision or repository.youngest_rev

            getfiles = []
            self._getfiles(req, getfiles, repository, '', rev_or_latest)

            if extensionlist:
                # Keep only files whose extension appears in the list
                # (avoids shadowing the ``file`` builtin as well).
                extensions = extensionlist.split(',')
                addfiles = ",".join(
                    name for name in getfiles
                    if os.path.splitext(str(name))[1].strip('.') in extensions)
            else:
                addfiles = ",".join(getfiles)

            if addfiles:
                # Append version control files
                parts.append('versioncontrolfiles|' + addfiles)
        except Exception:
            # Best-effort: a failed file listing must not break project open.
            self.log.exception("ProjectsRPC.openProject failed")
        return parts
Exemplo n.º 7
0
    def openProject(self, req, projectname, revision, extensionlist):
        """ Returns project's connection string and repository file list matched to search criteria.
        """
        # A blank project name is a caller error.
        if str(projectname).strip() == '':
            raise exceptions.Exception("Incorrect project name")

        # Blank parameters are treated as "not supplied".
        if revision.strip() == '':
            revision = None
        if extensionlist.strip() == '':
            extensionlist = None

        # Locate the repository for the requested path/revision.
        manager = RepositoryManager(self.env)
        repository = manager.get_repository(None)
        projectname = conf.cleanupProjectName(projectname)

        project = Project.get(env_name=projectname)
        if not project:
            return []
        parts = [self.get_scm_repository_url(project.env_name)]

        try:
            if revision:
                revision = repository.normalize_rev(revision)
            rev_or_latest = revision or repository.youngest_rev

            found = []
            self._getfiles(req, found, repository, '', rev_or_latest)

            if extensionlist:
                wanted = extensionlist.split(',')
                matches = [fname for fname in found
                           if os.path.splitext(str(fname))[1].strip('.') in wanted]
                addfiles = ",".join(matches)
            else:
                addfiles = ",".join(found)

            if addfiles:
                # Append version control files
                parts.append('versioncontrolfiles|' + addfiles)
        except Exception:
            self.log.exception("ProjectsRPC.openProject failed")
        return parts
Exemplo n.º 8
0
    def _process_repository(self, name):
        """Pull new changes into the named Mercurial repository."""
        if not name:
            return

        trac_repo = RepositoryManager(self.env).get_repository(name)
        if not trac_repo or not hasattr(trac_repo, 'path'):
            # Unknown repository or one without a local path: nothing to do.
            return

        # A fresh global UI is only needed to open the repository; the
        # repository's own UI (which carries its configuration) is used
        # for the pull itself.
        repo = hg.repository(mercurial_ui.ui(), trac_repo.path)

        # Pulling from default source (as configured in repo configuration)
        commands.pull(repo.ui, repo)
Exemplo n.º 9
0
    def _process_repository(self, name):
        """Run an ``hg pull`` for the repository registered under ``name``."""
        if not name:
            return

        manager = RepositoryManager(self.env)
        trac_repo = manager.get_repository(name)
        if not trac_repo or not hasattr(trac_repo, 'path'):
            return

        # Open the repository through a throwaway global UI instance.
        global_ui = mercurial_ui.ui()
        repo = hg.repository(global_ui, trac_repo.path)

        # Switch to the repository-level UI so per-repo configuration is
        # honored, then pull from the default source configured there.
        repo_ui = repo.ui
        commands.pull(repo_ui, repo)
Exemplo n.º 10
0
    def _process_repository(self, name):
        """Fetch new git revisions for repository ``name`` and notify Trac.

        Takes an exclusive lock file inside the git directory so that
        concurrent syncs of the same repository serialize, fetches all
        remotes, and feeds any newly appeared revisions to the
        ``changeset added`` admin command.
        """
        if not name:
            return

        rm = RepositoryManager(self.env)
        trac_repo = rm.get_repository(name)

        if not trac_repo or not hasattr(trac_repo, 'gitrepo'):
            return

        self.env.log.debug("GitHubSync: Processing repository at '%s'", trac_repo.gitrepo)

        lock_file = os.path.join(trac_repo.gitrepo, 'githubsync.lock')
        # with-block guarantees the handle is closed even if locking or
        # fetching raises (the original leaked it on a lockf failure).
        with open(lock_file, 'w') as lock:
            fcntl.lockf(lock, fcntl.LOCK_EX)
            try:
                self.env.log.debug("GitHubSync: Lock acquired")

                # Snapshot the reachable revisions before the fetch...
                before_revisions = set(trac_repo.git.repo.rev_list('--branches', '--tags').splitlines())

                # Pulling from default source (as configured in repo configuration)
                output = trac_repo.git.repo.fetch('--all', '--prune', '--tags')
                self.env.log.debug("GitHubSync: git output: %s", output)

                # ...and after, so the difference is exactly what arrived.
                after_revisions = set(trac_repo.git.repo.rev_list('--branches', '--tags').splitlines())
            finally:
                fcntl.lockf(lock, fcntl.LOCK_UN)
                os.unlink(lock_file)

                self.env.log.debug("GitHubSync: Lock released")

        new_revisions = after_revisions - before_revisions
        if new_revisions:
            self.env.log.debug("GitHubSync: New revisions: %s", new_revisions)
            cmd_mgr = AdminCommandManager(self.env)
            cmd_mgr.execute_command('changeset', 'added', name, *new_revisions)
Exemplo n.º 11
0
    def _sync(self, reponame, rev, clean):
        """Resynchronize the revision cache for one or all repositories."""
        manager = RepositoryManager(self.env)
        if reponame == "*":
            # A specific revision makes no sense across every repository.
            if rev is not None:
                raise TracError(_("Cannot synchronize a single revision on multiple repositories"))
            targets = manager.get_real_repositories()
        else:
            # The default repository is stored under the empty name.
            if is_default(reponame):
                reponame = ""
            repository = manager.get_repository(reponame)
            if repository is None:
                raise TracError(_("Repository '%(repo)s' not found", repo=reponame or "(default)"))
            if rev is not None:
                # Single-changeset resync: report it and stop.
                repository.sync_changeset(rev)
                printout(_("%(rev)s resynced on %(reponame)s.", rev=rev, reponame=repository.reponame or "(default)"))
                return
            targets = [repository]

        for repository in sorted(targets, key=lambda r: r.reponame):
            printout(_("Resyncing repository history for %(reponame)s... ", reponame=repository.reponame or "(default)"))
            repository.sync(self._sync_feedback, clean=clean)
            query = "SELECT count(rev) FROM revision WHERE repos=%s"
            for (count,) in self.env.db_query(query, (repository.id,)):
                printout(ngettext("%(num)s revision cached.", "%(num)s revisions cached.", num=count))
        printout(_("Done."))
 def verifyAll(self):
     """Verify recent commits of every configured Subversion repository.

     :returns: 0 when all verified repositories check out (or verification
               is disabled via ``[svn]verify_n_commits = 0``), 1 when at
               least one repository fails verification.
     """
     all_verified_good = True
     rm = RepositoryManager(self.env)
     for reponame, info in rm.get_all_repositories().iteritems():
         self.log.debug("Considering %s", info)
         # A repository counts as SVN when its own type says so, or when it
         # has an explicit empty type and the environment default is svn.
         if info.get('type', rm.repository_type) == "svn" or (rm.repository_type == 'svn' and info.get('type') == ''):
             if self.number_of_commits_to_verify < 0:
                 # Negative setting: verify the full history.
                 bound = 0
             elif self.number_of_commits_to_verify == 0:
                 self.log.warning("Not actually verifying any commits due to [svn]verify_n_commits = 0")
                 return 0
             else:
                 # Verify only the newest N commits.
                 y = rm.get_repository(reponame).youngest_rev
                 bound = max(0, y - self.number_of_commits_to_verify + 1)
                 self.log.debug("Only want to verify %d commits, so range is %d:HEAD (HEAD is currently %d)",
                                self.number_of_commits_to_verify,
                                bound,
                                y)
             # Fixed the confusing double indentation of the failure branch.
             if not self.verify(info['id'], info['dir'], start=bound):
                 all_verified_good = False
     if not all_verified_good:
         return 1
     else:
         return 0
Exemplo n.º 13
0
    def _do_sync(self, reponame):
        """Synchronize the dulwich object/commit cache for a repository.

        Walks all commits reachable from the current branch heads but not
        from the heads recorded in ``dulwich_heads`` (i.e. only the new
        history), registers every added/modified file and its parent trees
        in ``dulwich_objects``, then stores the new heads.

        :param reponame: name of the repository to synchronize.
        :raises TracError: if the repository is not known to the manager.
        """
        rm = RepositoryManager(self.env)
        repos = rm.get_repository(reponame)
        if repos is None:
            # NOTE(review): printf-style args are passed to TracError here;
            # unlike trac's _() this may leave %(repo)s uninterpolated --
            # confirm against the TracError signature.
            raise TracError("Repository '%(repo)s' not found", repo=reponame)

        printout("Synchronizing repository data for repository %s" % (reponame,))

        db = self.env.get_db_cnx()
        cursor = db.cursor()

        # The database stores the heads up to what it has currently cached. Use
        # these heads to determine where to stop to only cache the new
        # revisions
        exclude_list = []
        cursor.execute("SELECT head FROM dulwich_heads WHERE repos=%s", (repos.id,))

        for head in set(row[0] for row in cursor):
            exclude_list.append(head)

        # Determine all the heads for this repository
        heads = []
        refs = repos.dulwichrepo.get_refs()
        for key in refs.keys():
            if key.startswith("refs/heads/"):
                heads.append(refs[key])

        # Progress counters for the status line printed below.
        commit_count = 0
        object_count = 0

        # Walk only commits reachable from the heads but not yet cached.
        walker = repos.dulwichrepo.get_walker(include=heads, exclude=exclude_list)
        for walk in walker:
            for change in walk.changes():
                parents = []
                if isinstance(change, list):
                    # The change is a list when the file is a merge from two
                    # or more previous changesets
                    for c in change:
                        if c.old not in parents:
                            parents.append(c.old)
                    change = change[0]
                else:
                    parents.append(change.old)

                if change.type == "delete":
                    # we don't actually register deletes, they are registered
                    # when they are last modified
                    continue

                # check if this object is already in the database
                cursor.execute(
                    "SELECT commit_id FROM dulwich_objects WHERE repos=%s AND sha=%s", (repos.id, change.new.sha)
                )
                item = cursor.fetchone()

                if item:
                    # Object already cached: refresh its commit association.
                    try:
                        cursor.execute(
                            "UPDATE dulwich_objects SET commit_id=%s WHERE repos=%s AND sha=%s",
                            (walk.commit.id, repos.id, change.new.sha),
                        )
                        db.commit()
                    except:
                        # Todo: this is probably all right and has to do with merge changesets, but need to
                        # verify if it is absolutely the way it should be
                        pass
                else:
                    # in case of add, or a modify of a file that we did not yet encounter (because
                    #   we run in reverse order)
                    cursor.execute(
                        "INSERT INTO dulwich_objects (repos, sha, path, mode, commit_id) VALUES (%s, %s, %s, %s, %s)",
                        (repos.id, change.new.sha, change.new.path.decode("utf-8"), change.new.mode, walk.commit.id),
                    )
                    db.commit()
                    object_count += 1

                if change.type == "add":
                    # above in fetching o we already update the commit_id, so no action here
                    pass
                elif change.type == "modify":
                    for parent in parents:
                        try:
                            # actually the commit_id for the old changeset is wrong, but it will be updated in the following runs of the loop
                            cursor.execute(
                                "INSERT INTO dulwich_objects (repos, sha, path, mode, commit_id) VALUES (%s, %s, %s, %s, %s)",
                                (repos.id, parent.sha, parent.path.decode("utf-8"), parent.mode, walk.commit.id),
                            )
                            db.commit()
                            object_count += 1
                        except:
                            # if this fails, it means that the parent object is already in the database
                            # very likely because of merges. So it is safe to ignore.
                            pass

                # handle the trees
                path = os.path.split(change.new.path)[0]
                if not path:
                    continue
                current_path = ""
                tree = repos.dulwichrepo[walk.commit.tree]
                for part in path.split("/"):
                    # register each tree into the object store
                    current_path += part
                    mode, sha = tree.lookup_path(repos.dulwichrepo.get_object, current_path)
                    try:
                        cursor.execute(
                            "INSERT INTO dulwich_objects (repos, sha, path, mode, commit_id) VALUES (%s, %s, %s, %s, %s)",
                            (repos.id, sha, current_path.decode("utf-8"), mode, walk.commit.id),
                        )
                        db.commit()
                        object_count += 1
                    except:
                        # this tree was already registered with a previous path change
                        pass
                    current_path += "/"
            # prepare for next run
            commit_count += 1
            if commit_count % 5 == 0:
                # '\r' keeps the progress report on a single terminal line.
                sys.stdout.write("Synchronized %i commits with %i objects\r" % (commit_count, object_count))
                sys.stdout.flush()

        # Store the heads
        cursor.execute("DELETE FROM dulwich_heads WHERE repos=%s", (repos.id,))
        for head in heads:
            cursor.execute(
                """INSERT INTO dulwich_heads (repos, head)  
                           VALUES (%s, %s)
                           """,
                (repos.id, head),
            )
        db.commit()
        printout("Synchronized %i commits with %i objects" % (commit_count, object_count))