def test_push_invalidates_cache_git(self, webserver):
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == GIT_REPO).scalar()
        if not key:
            key = CacheInvalidation(GIT_REPO, GIT_REPO)
            Session().add(key)

        key.cache_active = True
        Session().commit()

        DEST = _get_tmp_dir()
        clone_url = webserver.repo_url(GIT_REPO)
        stdout, stderr = Command(TESTS_TMP_PATH).execute(
            'git clone', clone_url, DEST)

        # commit some stuff into this repo
        fork_name = '%s_fork%s' % (GIT_REPO, _RandomNameSequence().next())
        fixture.create_fork(GIT_REPO, fork_name)
        clone_url = webserver.repo_url(fork_name)
        stdout, stderr = _add_files_and_push(webserver,
                                             'git',
                                             DEST,
                                             files_no=1,
                                             clone_url=clone_url)
        _check_proper_git_push(stdout, stderr)

        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == fork_name).all()
        assert key == []
Example #2
    def test_push_invalidates_cache_hg(self, webserver):
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == HG_REPO).scalar()
        if not key:
            key = CacheInvalidation(HG_REPO, HG_REPO)
            Session().add(key)

        key.cache_active = True
        Session().commit()

        DEST = _get_tmp_dir()
        clone_url = webserver.repo_url(HG_REPO)
        stdout, stderr = Command(TESTS_TMP_PATH).execute(
            'hg clone', clone_url, DEST)

        fork_name = '%s_fork%s' % (HG_REPO, _RandomNameSequence().next())
        fixture.create_fork(HG_REPO, fork_name)
        clone_url = webserver.repo_url(fork_name)
        stdout, stderr = _add_files_and_push(webserver,
                                             'hg',
                                             DEST,
                                             files_no=1,
                                             clone_url=clone_url)

        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == fork_name).all()
        assert key == []
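
Both push tests above follow the same arrangement: seed an active CacheInvalidation row for the repository, push through a real clone, then assert that the push handler dropped the key for the fork. The seeding step on its own looks like this (a minimal sketch; the kallithea.model import paths are assumptions based on the era these snippets come from):

    from kallithea.model.db import CacheInvalidation  # assumed import path
    from kallithea.model.meta import Session          # assumed import path

    def ensure_active_cache_key(repo_name):
        # Reuse the existing invalidation row for this repo, if any.
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == repo_name).scalar()
        if not key:
            key = CacheInvalidation(repo_name, repo_name)
            Session().add(key)
        # Mark the cache active so the subsequent push has state to invalidate.
        key.cache_active = True
        Session().commit()
        return key
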
Example #3
    def mark_for_invalidation(self, repo_name):
        """
        Mark caches of this repo invalid in the database.

        :param repo_name: the repo for which caches should be marked invalid
        """
        CacheInvalidation.set_invalidate(repo_name)
        repo = Repository.get_by_repo_name(repo_name)
        if repo is not None:
            repo.update_changeset_cache()
Example #4
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database.

        :param repo_name: the repo for which caches should be marked invalid
        """
        CacheInvalidation.set_invalidate(repo_name, delete=delete)
        repo = Repository.get_by_repo_name(repo_name)
        if repo:
            repo.update_changeset_cache()
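
Examples #3 and #4 differ only in the delete flag, which Example #4 forwards to set_invalidate, presumably to remove the rows outright rather than just flagging them. A hedged usage sketch, assuming the method lives on a model class as in Kallithea's ScmModel (the hook name below is illustrative, not from the source):

    from kallithea.model.scm import ScmModel  # assumed location

    def post_push_hook(repo_name):
        # Flag every cached artifact of this repo as stale; readers rebuild
        # their caches lazily on the next access.
        ScmModel().mark_for_invalidation(repo_name)
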
Example #5
    def rss(self, repo_name):
        """Produce an rss2 feed via feedgenerator module"""

        @cache_region('long_term')
        def _get_feed_from_cache(key, kind):
            feed = Rss201rev2Feed(
                title=self.title % repo_name,
                link=h.canonical_url('summary_home', repo_name=repo_name),
                description=self.description % repo_name,
                language=self.language,
                ttl=self.ttl
            )

            for cs in reversed(list(c.db_repo_scm_instance[-self.feed_nr:])):
                feed.add_item(title=self._get_title(cs),
                              link=h.canonical_url('changeset_home', repo_name=repo_name,
                                       revision=cs.raw_id),
                              author_name=cs.author,
                              description=''.join(self.__get_desc(cs)),
                              pubdate=cs.date,
                             )

            response.content_type = feed.mime_type
            return feed.writeString('utf-8')

        kind = 'RSS'
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
        if not valid:
            region_invalidate(_get_feed_from_cache, None, repo_name, kind)
        return _get_feed_from_cache(repo_name, kind)
Example #6
    def atom(self, repo_name):
        """Produce an atom-1.0 feed via feedgenerator module"""
        @cache_region('long_term', '_get_feed_from_cache')
        def _get_feed_from_cache(key, kind):
            feed = Atom1Feed(title=_('%s %s feed') % (c.site_name, repo_name),
                             link=h.canonical_url('summary_home',
                                                  repo_name=repo_name),
                             description=_('Changes on %s repository') %
                             repo_name,
                             language=language,
                             ttl=ttl)

            rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
            for cs in reversed(
                    list(c.db_repo_scm_instance[-rss_items_per_page:])):
                feed.add_item(
                    title=self._get_title(cs),
                    link=h.canonical_url('changeset_home',
                                         repo_name=repo_name,
                                         revision=cs.raw_id),
                    author_name=cs.author,
                    description=''.join(self.__get_desc(cs)),
                    pubdate=cs.date,
                )

            response.content_type = feed.mime_type
            return feed.writeString('utf-8')

        kind = 'ATOM'
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
        if not valid:
            region_invalidate(_get_feed_from_cache, None,
                              '_get_feed_from_cache', repo_name, kind)
        return _get_feed_from_cache(repo_name, kind)
Example #7
    def atom(self, repo_name):
        """Produce an atom-1.0 feed via feedgenerator module"""

        @cache_region('long_term', '_get_feed_from_cache')
        def _get_feed_from_cache(key, kind):
            feed = Atom1Feed(
                title=_('%s %s feed') % (c.site_name, repo_name),
                link=h.canonical_url('summary_home', repo_name=repo_name),
                description=_('Changes on %s repository') % repo_name,
                language=language,
                ttl=ttl
            )

            rss_items_per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
            for cs in reversed(list(c.db_repo_scm_instance[-rss_items_per_page:])):
                feed.add_item(title=self._get_title(cs),
                              link=h.canonical_url('changeset_home', repo_name=repo_name,
                                       revision=cs.raw_id),
                              author_name=cs.author,
                              description=''.join(self.__get_desc(cs)),
                              pubdate=cs.date,
                              )

            response.content_type = feed.mime_type
            return feed.writeString('utf-8')

        kind = 'ATOM'
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
        if not valid:
            region_invalidate(_get_feed_from_cache, None, '_get_feed_from_cache', repo_name, kind)
        return _get_feed_from_cache(repo_name, kind)
Example #8
    def rss(self, repo_name):
        """Produce an rss2 feed via feedgenerator module"""
        @cache_region('long_term')
        def _get_feed_from_cache(key, kind):
            feed = Rss201rev2Feed(title=self.title % repo_name,
                                  link=h.canonical_url('summary_home',
                                                       repo_name=repo_name),
                                  description=self.description % repo_name,
                                  language=self.language,
                                  ttl=self.ttl)

            for cs in reversed(list(c.db_repo_scm_instance[-self.feed_nr:])):
                feed.add_item(
                    title=self._get_title(cs),
                    link=h.canonical_url('changeset_home',
                                         repo_name=repo_name,
                                         revision=cs.raw_id),
                    author_name=cs.author,
                    description=''.join(self.__get_desc(cs)),
                    pubdate=cs.date,
                )

            response.content_type = feed.mime_type
            return feed.writeString('utf-8')

        kind = 'RSS'
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
        if not valid:
            region_invalidate(_get_feed_from_cache, None, repo_name, kind)
        return _get_feed_from_cache(repo_name, kind)
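
Examples #5 through #8 are four renderings of one idiom: a beaker @cache_region-decorated inner function holds the expensive work, CacheInvalidation.test_and_set_valid consults the database, and region_invalidate evicts the stale beaker entry before the final call repopulates it. Stripped of the feed details, the idiom reduces to the sketch below (the region name and namespace argument mirror the examples; the payload is a placeholder):

    from beaker.cache import cache_region, region_invalidate
    from kallithea.model.db import CacheInvalidation  # assumed import path

    def cached_view(repo_name):
        @cache_region('long_term', '_compute')
        def _compute(key, kind):
            # Expensive work keyed by (key, kind); runs only on a cache miss.
            return 'payload for %s/%s' % (key, kind)

        kind = 'DEMO'
        # Ask the database whether the cached copy is still valid; on a stale
        # key this also re-marks it valid, so only one request pays the cost.
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
        if not valid:
            # Evict the stale beaker entry so the call below recomputes it.
            region_invalidate(_compute, None, '_compute', repo_name, kind)
        return _compute(repo_name, kind)
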
Example #9
    def test_push_invalidates_cache_hg(self):
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == HG_REPO).scalar()
        if not key:
            key = CacheInvalidation(HG_REPO, HG_REPO)

        key.cache_active = True
        Session().add(key)
        Session().commit()

        DEST = _get_tmp_dir()
        clone_url = _construct_url(HG_REPO, dest=DEST)
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)

        stdout, stderr = _add_files_and_push('hg', DEST, files_no=1)

        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == HG_REPO).one()
        self.assertEqual(key.cache_active, False)
Example #10
    def test_push_invalidates_cache_hg(self):
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == HG_REPO).scalar()
        if not key:
            key = CacheInvalidation(HG_REPO, HG_REPO)

        key.cache_active = True
        Session().add(key)
        Session().commit()

        DEST = _get_tmp_dir()
        clone_url = _construct_url(HG_REPO, dest=DEST)
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)

        stdout, stderr = _add_files_and_push('hg', DEST, files_no=1)

        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == HG_REPO).one()
        self.assertEqual(key.cache_active, False)
Example #11
    def take_action(self, args):
        _caches = CacheInvalidation.query().order_by(CacheInvalidation.cache_key).all()
        if args.show:
            for c_obj in _caches:
                print 'key:%s active:%s' % (safe_str(c_obj.cache_key), c_obj.cache_active)
        elif args.cleanup:
            for c_obj in _caches:
                Session().delete(c_obj)
                print 'Removing key: %s' % (safe_str(c_obj.cache_key))
            Session().commit()
        else:
            print 'Nothing done, exiting...'
Example #12
    def test_push_invalidates_cache_hg(self, webserver):
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == HG_REPO).scalar()
        if not key:
            key = CacheInvalidation(HG_REPO, HG_REPO)
            Session().add(key)

        key.cache_active = True
        Session().commit()

        DEST = _get_tmp_dir()
        clone_url = webserver.repo_url(HG_REPO)
        stdout, stderr = Command(TESTS_TMP_PATH).execute('hg clone', clone_url, DEST)

        fork_name = '%s_fork%s' % (HG_REPO, _RandomNameSequence().next())
        fixture.create_fork(HG_REPO, fork_name)
        clone_url = webserver.repo_url(fork_name)
        stdout, stderr = _add_files_and_push(webserver, 'hg', DEST, files_no=1, clone_url=clone_url)

        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == fork_name).all()
        assert key == []
Example #13
    def take_action(self, args):
        _caches = CacheInvalidation.query().order_by(
            CacheInvalidation.cache_key).all()
        if args.show:
            for c_obj in _caches:
                print 'key:%s active:%s' % (safe_str(c_obj.cache_key),
                                            c_obj.cache_active)
        elif args.cleanup:
            for c_obj in _caches:
                Session().delete(c_obj)
                print 'Removing key: %s' % (safe_str(c_obj.cache_key))
            Session().commit()
        else:
            print 'Nothing done, exiting...'
Example #14
    def command(self):
        # get SqlAlchemy session
        self._init_session()
        _caches = CacheInvalidation.query().order_by(CacheInvalidation.cache_key).all()
        if self.options.show:
            for c_obj in _caches:
                print 'key:%s active:%s' % (c_obj.cache_key, c_obj.cache_active)
        elif self.options.cleanup:
            for c_obj in _caches:
                Session().delete(c_obj)
                print 'removing key:%s' % (c_obj.cache_key)
                Session().commit()
        else:
            print 'nothing done exiting...'
        sys.exit(0)
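
Examples #11, #13 and #14 are three generations of the same maintenance command: a show mode that lists each cache key with its active flag, and a cleanup mode that deletes every row. A hedged sketch of the argument wiring the take_action variants imply (flag names mirror the args.show and args.cleanup attributes the code reads; the real command is registered with the application's CLI framework):

    import argparse

    parser = argparse.ArgumentParser(
        description='inspect or purge CacheInvalidation keys')
    parser.add_argument('--show', action='store_true',
                        help='list every cache key and its active flag')
    parser.add_argument('--cleanup', action='store_true',
                        help='delete all cache invalidation rows')
    # Parsing a sample argv; a real entry point would use sys.argv.
    args = parser.parse_args(['--show'])
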
Example #15
    def test_push_invalidates_cache_git(self, webserver):
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == GIT_REPO).scalar()
        if not key:
            key = CacheInvalidation(GIT_REPO, GIT_REPO)
            Session().add(key)

        key.cache_active = True
        Session().commit()

        DEST = _get_tmp_dir()
        clone_url = webserver.repo_url(GIT_REPO)
        stdout, stderr = Command(TESTS_TMP_PATH).execute('git clone', clone_url, DEST)

        # commit some stuff into this repo
        fork_name = '%s_fork%s' % (GIT_REPO, _RandomNameSequence().next())
        fixture.create_fork(GIT_REPO, fork_name)
        clone_url = webserver.repo_url(fork_name)
        stdout, stderr = _add_files_and_push(webserver, 'git', DEST, files_no=1, clone_url=clone_url)
        _check_proper_git_push(stdout, stderr)

        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == fork_name).all()
        assert key == []
Example #16
    def __iter__(self):
        # pre-fetch valid_cache_keys once to avoid executing a select
        # statement for each repo
        valid_cache_keys = CacheInvalidation.get_valid_cache_keys()

        for dbr in self.db_repo_list:
            scmr = dbr.scm_instance_cached(valid_cache_keys)
            # check permission at this level
            if not HasRepoPermissionAny(
                *self.perm_set)(dbr.repo_name, 'get repo check'):
                continue

            try:
                last_change = scmr.last_change
                tip = h.get_changeset_safe(scmr, 'tip')
            except Exception:
                log.error(
                    '%s this repository is present in database but it '
                    'cannot be created as an scm instance, org_exc:%s'
                    % (dbr.repo_name, traceback.format_exc())
                )
                continue

            tmp_d = {}
            tmp_d['name'] = dbr.repo_name
            tmp_d['name_sort'] = tmp_d['name'].lower()
            tmp_d['raw_name'] = tmp_d['name'].lower()
            tmp_d['description'] = dbr.description
            tmp_d['description_sort'] = tmp_d['description'].lower()
            tmp_d['last_change'] = last_change
            tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
            tmp_d['tip'] = tip.raw_id
            tmp_d['tip_sort'] = tip.revision
            tmp_d['rev'] = tip.revision
            tmp_d['contact'] = dbr.user.full_contact
            tmp_d['contact_sort'] = tmp_d['contact']
            tmp_d['owner_sort'] = tmp_d['contact']
            tmp_d['repo_archives'] = list(scmr._get_archives())
            tmp_d['last_msg'] = tip.message
            tmp_d['author'] = tip.author
            tmp_d['dbrepo'] = dbr.get_dict()
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
            yield tmp_d
Example #17
    def __iter__(self):
        # pre-fetch valid_cache_keys once to avoid executing a select
        # statement for each repo
        valid_cache_keys = CacheInvalidation.get_valid_cache_keys()

        for dbr in self.db_repo_list:
            scmr = dbr.scm_instance_cached(valid_cache_keys)
            # check permission at this level
            if not HasRepoPermissionAny(*self.perm_set)(dbr.repo_name,
                                                        'get repo check'):
                continue

            try:
                last_change = scmr.last_change
                tip = h.get_changeset_safe(scmr, 'tip')
            except Exception:
                log.error('%s this repository is present in database but it '
                          'cannot be created as an scm instance, org_exc:%s' %
                          (dbr.repo_name, traceback.format_exc()))
                continue

            tmp_d = {}
            tmp_d['name'] = dbr.repo_name
            tmp_d['name_sort'] = tmp_d['name'].lower()
            tmp_d['raw_name'] = tmp_d['name'].lower()
            tmp_d['description'] = dbr.description
            tmp_d['description_sort'] = tmp_d['description'].lower()
            tmp_d['last_change'] = last_change
            tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
            tmp_d['tip'] = tip.raw_id
            tmp_d['tip_sort'] = tip.revision
            tmp_d['rev'] = tip.revision
            tmp_d['contact'] = dbr.user.full_contact
            tmp_d['contact_sort'] = tmp_d['contact']
            tmp_d['owner_sort'] = tmp_d['contact']
            tmp_d['repo_archives'] = list(scmr._get_archives())
            tmp_d['last_msg'] = tip.message
            tmp_d['author'] = tip.author
            tmp_d['dbrepo'] = dbr.get_dict()
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
            yield tmp_d
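
The comment at the top of Examples #16 and #17 is the whole point of get_valid_cache_keys(): one query fetches every valid key up front, and scm_instance_cached then checks membership in memory instead of issuing a SELECT per repository. Reduced to that shape (a sketch; the wrapper class is illustrative):

    from kallithea.model.db import CacheInvalidation  # assumed import path

    class CachedRepoList(object):
        def __init__(self, db_repo_list):
            self.db_repo_list = db_repo_list

        def __iter__(self):
            # One SELECT for all currently valid cache keys ...
            valid_cache_keys = CacheInvalidation.get_valid_cache_keys()
            for dbr in self.db_repo_list:
                # ... so each repo's cache check is an in-memory lookup.
                yield dbr.scm_instance_cached(valid_cache_keys)
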
Example #18
    def __get_readme_data(self, db_repo):
        repo_name = db_repo.repo_name
        log.debug('Looking for README file')

        @cache_region('long_term', '_get_readme_from_cache')
        def _get_readme_from_cache(key, kind):
            readme_data = None
            readme_file = None
            try:
                # get the landing revision, or tip if that fails
                cs = db_repo.get_landing_changeset()
                if isinstance(cs, EmptyChangeset):
                    raise EmptyRepositoryError()
                renderer = MarkupRenderer()
                for f in README_FILES:
                    try:
                        readme = cs.get_node(f)
                        if not isinstance(readme, FileNode):
                            continue
                        readme_file = f
                        log.debug('Found README file `%s` rendering...',
                                  readme_file)
                        readme_data = renderer.render(readme.content,
                                                      filename=f)
                        break
                    except NodeDoesNotExistError:
                        continue
            except ChangesetError:
                log.error(traceback.format_exc())
            except EmptyRepositoryError:
                pass
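            # NB: failures above leave readme_data as None, and the resulting
            # (None, None) return value is cached like any other, so a repo
            # without a README is not re-scanned until the next invalidation.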

            return readme_data, readme_file

        kind = 'README'
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
        if not valid:
            region_invalidate(_get_readme_from_cache, None, '_get_readme_from_cache', repo_name, kind)
        return _get_readme_from_cache(repo_name, kind)
Example #19
    def test_push_invalidates_cache_git(self):
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == GIT_REPO).scalar()
        if not key:
            key = CacheInvalidation(GIT_REPO, GIT_REPO)

        key.cache_active = True
        Session().add(key)
        Session().commit()

        DEST = _get_tmp_dir()
        clone_url = _construct_url(GIT_REPO, dest=DEST)
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)

        # commit some stuff into this repo
        stdout, stderr = _add_files_and_push('git', DEST, files_no=1)
        _check_proper_git_push(stdout, stderr)

        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == GIT_REPO).one()
        print CacheInvalidation.get_all()
        self.assertEqual(key.cache_active, False)
Example #20
    def test_push_invalidates_cache_git(self):
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == GIT_REPO).scalar()
        if not key:
            key = CacheInvalidation(GIT_REPO, GIT_REPO)

        key.cache_active = True
        Session().add(key)
        Session().commit()

        DEST = _get_tmp_dir()
        clone_url = _construct_url(GIT_REPO, dest=DEST)
        stdout, stderr = Command('/tmp').execute('git clone', clone_url)

        # commit some stuff into this repo
        stdout, stderr = _add_files_and_push('git', DEST, files_no=1)
        _check_proper_git_push(stdout, stderr)

        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == GIT_REPO).one()
        print CacheInvalidation.get_all()
        self.assertEqual(key.cache_active, False)
Example #21
    def upgrade(self):
        """
        Upgrades given database schema to given revision following
        all needed steps, to perform the upgrade

        """

        from kallithea.lib.dbmigrate.migrate.versioning import api
        from kallithea.lib.dbmigrate.migrate.exceptions import \
            DatabaseNotControlledError

        if 'sqlite' in self.dburi:
            print(
                '********************** WARNING **********************\n'
                'Make sure your version of sqlite is at least 3.7.X.  \n'
                'Earlier versions are known to fail on some migrations\n'
                '*****************************************************\n')

        upgrade = ask_ok('You are about to perform database upgrade, make '
                         'sure You backed up your database before. '
                         'Continue ? [y/n]')
        if not upgrade:
            print 'No upgrade performed'
            sys.exit(0)

        repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
                             'kallithea/lib/dbmigrate')
        db_uri = self.dburi

        try:
            curr_version = api.db_version(db_uri, repository_path)
            msg = ('Found current database under version '
                   'control with version %s' % curr_version)

        except (RuntimeError, DatabaseNotControlledError):
            curr_version = 1
            msg = ('Current database is not under version control. Setting '
                   'as version %s' % curr_version)
            api.version_control(db_uri, repository_path, curr_version)

        notify(msg)
        if curr_version == __dbversion__:
            print 'This database is already at the newest version'
            sys.exit(0)

        # clear cache keys
        log.info("Clearing cache keys now...")
        CacheInvalidation.clear_cache()

        upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
        notify('attempting to do database upgrade from '
               'version %s to version %s' % (curr_version, __dbversion__))

        # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
        _step = None
        for step in upgrade_steps:
            notify('performing upgrade step %s' % step)
            time.sleep(0.5)

            api.upgrade(db_uri, repository_path, step)
            notify('schema upgrade for step %s completed' % (step, ))

            _step = step

        notify('upgrade to version %s successful' % _step)
Example #22
    def upgrade(self):
        """
        Upgrades given database schema to given revision following
        all needed steps, to perform the upgrade

        """

        from kallithea.lib.dbmigrate.migrate.versioning import api
        from kallithea.lib.dbmigrate.migrate.exceptions import \
            DatabaseNotControlledError

        if 'sqlite' in self.dburi:
            print(
                '********************** WARNING **********************\n'
                'Make sure your version of sqlite is at least 3.7.X.  \n'
                'Earlier versions are known to fail on some migrations\n'
                '*****************************************************\n')

        upgrade = ask_ok('You are about to perform database upgrade, make '
                         'sure You backed up your database before. '
                         'Continue ? [y/n]')
        if not upgrade:
            print 'No upgrade performed'
            sys.exit(0)

        repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
                             'kallithea/lib/dbmigrate')
        db_uri = self.dburi

        try:
            curr_version = api.db_version(db_uri, repository_path)
            msg = ('Found current database under version '
                   'control with version %s' % curr_version)

        except (RuntimeError, DatabaseNotControlledError):
            curr_version = 1
            msg = ('Current database is not under version control. Setting '
                   'as version %s' % curr_version)
            api.version_control(db_uri, repository_path, curr_version)

        notify(msg)
        if curr_version == __dbversion__:
            print 'This database is already at the newest version'
            sys.exit(0)

        # clear cache keys
        log.info("Clearing cache keys now...")
        CacheInvalidation.clear_cache()

        upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
        notify('attempting to do database upgrade from '
               'version %s to version %s' % (curr_version, __dbversion__))

        # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
        _step = None
        for step in upgrade_steps:
            notify('performing upgrade step %s' % step)
            time.sleep(0.5)

            api.upgrade(db_uri, repository_path, step)
            notify('schema upgrade for step %s completed' % (step,))

            _step = step

        notify('upgrade to version %s successful' % _step)
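
Both upgrade variants clear all cache keys before running the migration steps, since invalidation rows written under the old schema may no longer be trustworthy afterwards. In isolation, and assuming the same model import as in the sketches above, that step is simply:

    from kallithea.model.db import CacheInvalidation  # assumed import path

    # Drop all invalidation bookkeeping; caches repopulate lazily once the
    # schema upgrade completes.
    CacheInvalidation.clear_cache()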