Example #1
0
    def __get_readme_data(self, repo_name, repo):
        """
        Locate and render the first README file found in the repository
        tip changeset, caching the rendered result in the 'long_term'
        beaker cache region.

        :param repo_name: repository name, used to build the cache key
        :param repo: scm repository instance to read the README from
        :return: tuple of (rendered readme html or None, readme filename
            or None)
        """

        @cache_region('long_term')
        def _get_readme_from_cache(key):
            readme_data = None
            readme_file = None
            log.debug('Fetching readme file')
            try:
                cs = repo.get_changeset()  # fetches TIP
                renderer = MarkupRenderer()
                for f in README_FILES:
                    try:
                        readme = cs.get_node(f)
                        readme_file = f
                        readme_data = renderer.render(readme.content, f)
                        # lazy %-args: only formatted when DEBUG is enabled
                        log.debug('Found readme %s', readme_file)
                        break
                    except NodeDoesNotExistError:
                        continue
            except ChangesetError:
                log.error(traceback.format_exc())
            except EmptyRepositoryError:
                # no changesets yet - simply report no README
                pass
            except Exception:
                log.error(traceback.format_exc())

            return readme_data, readme_file

        key = repo_name + '_README'
        inv = CacheInvalidation.invalidate(key)
        if inv is not None:
            # cache was marked stale: purge the cached entry and mark the
            # key valid again before regenerating
            region_invalidate(_get_readme_from_cache, None, key)
            CacheInvalidation.set_valid(inv.cache_key)
        return _get_readme_from_cache(key)
Example #2
0
    def rss(self, repo_name):
        """Produce an rss2 feed via feedgenerator module"""

        @cache_region('long_term')
        def _get_feed_from_cache(key):
            # build the feed skeleton with repository-level metadata
            feed = Rss201rev2Feed(
                title=self.title % repo_name,
                link=url('summary_home', repo_name=repo_name, qualified=True),
                description=self.description % repo_name,
                language=self.language,
                ttl=self.ttl,
            )

            # take the last feed_nr changesets and emit them newest first
            latest = list(c.rhodecode_repo[-self.feed_nr:])
            for cs in reversed(latest):
                entry_link = url('changeset_home', repo_name=repo_name,
                                 revision=cs.raw_id, qualified=True)
                feed.add_item(
                    title=self._get_title(cs),
                    link=entry_link,
                    author_name=cs.author,
                    description=''.join(self.__get_desc(cs)),
                    pubdate=cs.date,
                )

            response.content_type = feed.mime_type
            return feed.writeString('utf-8')

        key = repo_name + '_RSS'
        # a non-None invalidation record means the cached feed is stale
        inv = CacheInvalidation.invalidate(key)
        if inv is not None:
            region_invalidate(_get_feed_from_cache, None, key)
            CacheInvalidation.set_valid(inv.cache_key)
        return _get_feed_from_cache(key)
Example #3
0
    def rss(self, repo_name):
        """Produce an rss2 feed via feedgenerator module"""

        @cache_region('long_term')
        def _get_feed_from_cache(key):
            # `key` only identifies the beaker cache entry; the data comes
            # from the enclosing scope (repo_name, self, c.rhodecode_repo)
            feed = Rss201rev2Feed(
                title=self.title % repo_name,
                link=url('summary_home', repo_name=repo_name,
                         qualified=True),
                description=self.description % repo_name,
                language=self.language,
                ttl=self.ttl
            )

            # iterate the last feed_nr changesets, newest first
            for cs in reversed(list(c.rhodecode_repo[-self.feed_nr:])):
                feed.add_item(title=self._get_title(cs),
                              link=url('changeset_home', repo_name=repo_name,
                                       revision=cs.raw_id, qualified=True),
                              author_name=cs.author,
                              description=''.join(self.__get_desc(cs)),
                              pubdate=cs.date,
                             )

            response.content_type = feed.mime_type
            return feed.writeString('utf-8')

        key = repo_name + '_RSS'
        # a non-None invalidation record means the cached feed is stale:
        # purge it and mark the key valid again before serving a fresh feed
        inv = CacheInvalidation.invalidate(key)
        if inv is not None:
            region_invalidate(_get_feed_from_cache, None, key)
            CacheInvalidation.set_valid(inv.cache_key)
        return _get_feed_from_cache(key)
Example #4
0
    def __get_readme_data(self, repo_name, repo):
        """
        Find and render the first matching README in the repository tip,
        caching the rendered html in the 'long_term' beaker region.

        :param repo_name: repository name, seeds the cache key
        :param repo: scm repository instance to read the README from
        :return: tuple of (rendered readme html or None, readme filename
            or None)
        """
        @cache_region('long_term')
        def _get_readme_from_cache(key):
            readme_data = None
            readme_file = None
            log.debug('Fetching readme file')
            try:
                cs = repo.get_changeset()  # fetches TIP
                renderer = MarkupRenderer()
                # try each candidate README filename in preference order
                for f in README_FILES:
                    try:
                        readme = cs.get_node(f)
                        readme_file = f
                        readme_data = renderer.render(readme.content, f)
                        log.debug('Found readme %s' % readme_file)
                        break
                    except NodeDoesNotExistError:
                        continue
            except ChangesetError:
                log.error(traceback.format_exc())
                pass
            except EmptyRepositoryError:
                # no changesets yet - report no README
                pass
            except Exception:
                log.error(traceback.format_exc())

            return readme_data, readme_file

        key = repo_name + '_README'
        # a non-None invalidation record means the cache entry is stale:
        # purge it and mark the key valid again before regenerating
        inv = CacheInvalidation.invalidate(key)
        if inv is not None:
            region_invalidate(_get_readme_from_cache, None, key)
            CacheInvalidation.set_valid(inv.cache_key)
        return _get_readme_from_cache(key)
Example #5
0
    def mark_for_invalidation(self, repo_name):
        """
        Puts a cache invalidation task into the db for
        further global cache invalidation

        :param repo_name: the repo for which cache invalidation should
            take place
        """
        CacheInvalidation.set_invalidate(repo_name)
Example #6
0
    def mark_for_invalidation(self, repo_name):
        """
        Puts a cache invalidation task into the db for
        further global cache invalidation

        :param repo_name: the repo for which cache invalidation should
            take place
        """
        CacheInvalidation.set_invalidate(repo_name)
Example #7
0
    def mark_for_invalidation(self, repo_name):
        """
        Mark caches of this repo invalid in the database.

        :param repo_name: the repo for which caches should be marked invalid
        """
        CacheInvalidation.set_invalidate(repo_name)
        # refresh the cached changeset metadata for the repo when it exists
        db_repo = Repository.get_by_repo_name(repo_name)
        if db_repo is not None:
            db_repo.update_changeset_cache()
Example #8
0
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also check for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :return: tuple of (added repo names, removed repo names)
    """
    from rhodecode.model.repo import RepoModel
    sa = meta.Session
    rm = RepoModel()
    # `== True` is required for SQLAlchemy expression building; do not
    # rewrite it as `is True`
    user = sa.query(User).filter(User.admin == True).first()
    if user is None:
        raise Exception('Missing administrative account !')
    added = []

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        if not rm.get_by_repo_name(name, cache=False):
            # repo exists on the filesystem but not in the database yet
            log.info('repository %s not found creating default', name)
            added.append(name)
            form_data = {
             'repo_name': name,
             'repo_name_full': name,
             'repo_type': repo.alias,
             'description': (repo.description
                             if repo.description != 'unknown'
                             else '%s repository' % name),
             'private': False,
             'group_id': getattr(group, 'group_id', None)
            }
            rm.create(form_data, user, just_db=True)
    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list:
                log.debug("Removing non existing repository found in db %s",
                          repo.repo_name)
                removed.append(repo.repo_name)
                sa.delete(repo)
                sa.commit()

    # clear cache keys
    log.debug("Clearing cache keys now...")
    CacheInvalidation.clear_cache()
    sa.commit()
    return added, removed
    def test_push_invalidates_cache_hg(self):
        """A push to a hg repo must deactivate its cache invalidation key."""
        # start from an active (valid) cache key for the test repository
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == HG_REPO).one()
        key.cache_active = True
        Session().add(key)
        Session().commit()

        dest = _get_tmp_dir()
        clone_url = _construct_url(HG_REPO, dest=dest)
        Command('/tmp').execute('hg clone', clone_url)

        # pushing new commits should flip the key back to inactive
        _add_files_and_push('hg', dest, files_no=1)
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == HG_REPO).one()
        self.assertEqual(key.cache_active, False)
Example #10
0
    def test_push_invalidates_cache_hg(self):
        """A push to a hg repo must deactivate its cache invalidation key."""
        # start from an active (valid) cache key for the test repository
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == HG_REPO).one()
        key.cache_active = True
        Session().add(key)
        Session().commit()

        DEST = _get_tmp_dir()
        clone_url = _construct_url(HG_REPO, dest=DEST)
        stdout, stderr = Command('/tmp').execute('hg clone', clone_url)

        # pushing new commits must mark the key inactive (cache invalidated)
        stdout, stderr = _add_files_and_push('hg', DEST, files_no=1)
        key = CacheInvalidation.query().filter(
            CacheInvalidation.cache_key == HG_REPO).one()
        self.assertEqual(key.cache_active, False)
Example #11
0
    def atom(self, repo_name):
        """Produce an atom-1.0 feed via feedgenerator module"""

        @cache_region('long_term')
        def _get_feed_from_cache(key, kind):
            # assemble an empty feed carrying the repository metadata
            feed = Atom1Feed(
                title=self.title % repo_name,
                link=url('summary_home', repo_name=repo_name, qualified=True),
                description=self.description % repo_name,
                language=self.language,
                ttl=self.ttl,
            )

            # walk the newest feed_nr changesets, emitting newest first
            for cs in reversed(list(c.rhodecode_repo[-self.feed_nr:])):
                cs_link = url('changeset_home', repo_name=repo_name,
                              revision=cs.raw_id, qualified=True)
                feed.add_item(
                    title=self._get_title(cs),
                    link=cs_link,
                    author_name=cs.author,
                    description=''.join(self.__get_desc(cs)),
                    pubdate=cs.date,
                )

            response.content_type = feed.mime_type
            return feed.writeString('utf-8')

        kind = 'ATOM'
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
        if not valid:
            # cached copy is stale - drop it before regenerating
            region_invalidate(_get_feed_from_cache, None, repo_name, kind)
        return _get_feed_from_cache(repo_name, kind)
Example #12
0
    def atom(self, repo_name):
        """Produce an atom-1.0 feed via feedgenerator module"""

        @cache_region('long_term')
        def _get_feed_from_cache(key, kind):
            # (key, kind) together form the beaker cache key; the data is
            # read from the enclosing scope
            feed = Atom1Feed(
                 title=self.title % repo_name,
                 link=url('summary_home', repo_name=repo_name,
                          qualified=True),
                 description=self.description % repo_name,
                 language=self.language,
                 ttl=self.ttl
            )

            # iterate the last feed_nr changesets, newest first
            for cs in reversed(list(c.rhodecode_repo[-self.feed_nr:])):
                feed.add_item(title=self._get_title(cs),
                              link=url('changeset_home', repo_name=repo_name,
                                       revision=cs.raw_id, qualified=True),
                              author_name=cs.author,
                              description=''.join(self.__get_desc(cs)),
                              pubdate=cs.date,
                              )

            response.content_type = feed.mime_type
            return feed.writeString('utf-8')

        kind = 'ATOM'
        # test_and_set_valid returns False when the key was invalidated;
        # in that case drop the stale cached feed before regenerating
        valid = CacheInvalidation.test_and_set_valid(repo_name, kind)
        if not valid:
            region_invalidate(_get_feed_from_cache, None, repo_name, kind)
        return _get_feed_from_cache(repo_name, kind)
Example #13
0
    def __get_readme_data(self, db_repo):
        """
        Locate and render the first README file in the repo's landing
        changeset, caching the rendered html in the 'long_term' region.

        :param db_repo: database Repository instance
        :return: tuple of (rendered readme html or None, readme filename
            or None)
        """
        repo_name = db_repo.repo_name

        @cache_region('long_term')
        def _get_readme_from_cache(key):
            readme_data = None
            readme_file = None
            log.debug('Looking for README file')
            try:
                # gets the landing revision, or tip if that fails
                cs = db_repo.get_landing_changeset()
                if isinstance(cs, EmptyChangeset):
                    raise EmptyRepositoryError()
                renderer = MarkupRenderer()
                for f in README_FILES:
                    try:
                        readme = cs.get_node(f)
                        # only plain file nodes qualify as a README
                        if not isinstance(readme, FileNode):
                            continue
                        readme_file = f
                        # lazy %-args: only formatted when DEBUG is enabled
                        log.debug('Found README file `%s` rendering...',
                                  readme_file)
                        readme_data = renderer.render(readme.content, f)
                        break
                    except NodeDoesNotExistError:
                        continue
            except ChangesetError:
                log.error(traceback.format_exc())
            except EmptyRepositoryError:
                # empty or unborn repository - no README to show
                pass
            except Exception:
                log.error(traceback.format_exc())

            return readme_data, readme_file

        key = repo_name + '_README'
        inv = CacheInvalidation.invalidate(key)
        if inv is not None:
            # stale cache - drop the entry and mark the key valid again
            region_invalidate(_get_readme_from_cache, None, key)
            CacheInvalidation.set_valid(inv.cache_key)
        return _get_readme_from_cache(key)
Example #14
0
    def __get_readme_data(self, db_repo):
        """
        Locate and render the first README file in the repo's landing
        changeset, caching the rendered html in the 'long_term' region.

        :param db_repo: database Repository instance
        :return: tuple of (rendered readme html or None, readme filename
            or None)
        """
        # the repo name seeds the beaker cache key below
        repo_name = db_repo.repo_name

        @cache_region('long_term')
        def _get_readme_from_cache(key):
            readme_data = None
            readme_file = None
            log.debug('Looking for README file')
            try:
                # gets the landing revision! or tip if that fails
                cs = db_repo.get_landing_changeset()
                if isinstance(cs, EmptyChangeset):
                    raise EmptyRepositoryError()
                renderer = MarkupRenderer()
                for f in README_FILES:
                    try:
                        readme = cs.get_node(f)
                        # skip anything that isn't a plain file node
                        if not isinstance(readme, FileNode):
                            continue
                        readme_file = f
                        log.debug('Found README file `%s` rendering...' %
                                  readme_file)
                        readme_data = renderer.render(readme.content, f)
                        break
                    except NodeDoesNotExistError:
                        continue
            except ChangesetError:
                log.error(traceback.format_exc())
                pass
            except EmptyRepositoryError:
                # empty repository - no README to render
                pass
            except Exception:
                log.error(traceback.format_exc())

            return readme_data, readme_file

        key = repo_name + '_README'
        # a non-None invalidation record means the cache entry is stale:
        # purge it and mark the key valid again before regenerating
        inv = CacheInvalidation.invalidate(key)
        if inv is not None:
            region_invalidate(_get_readme_from_cache, None, key)
            CacheInvalidation.set_valid(inv.cache_key)
        return _get_readme_from_cache(key)
Example #15
0
    def mark_for_invalidation(self, repo_name):
        """
        Puts a cache invalidation task into the db for
        further global cache invalidation

        :param repo_name: the repo for which cache invalidation should
            take place
        :return: the cache keys that were invalidated
        """
        invalidated_keys = CacheInvalidation.set_invalidate(repo_name=repo_name)
        # also refresh the cached changeset metadata for the repo, if any
        repo = Repository.get_by_repo_name(repo_name)
        if repo:
            repo.update_changeset_cache()
        return invalidated_keys
Example #16
0
    def mark_for_invalidation(self, repo_name):
        """
        Puts a cache invalidation task into the db for
        further global cache invalidation

        :param repo_name: the repo for which cache invalidation should
            take place
        :return: the cache keys that were invalidated
        """
        invalidated_keys = CacheInvalidation.set_invalidate(
            repo_name=repo_name)
        # also refresh the cached changeset metadata for the repo, if any
        repo = Repository.get_by_repo_name(repo_name)
        if repo:
            repo.update_changeset_cache()
        return invalidated_keys
Example #17
0
 def command(self):
     """List or purge cache invalidation keys (paster command entry point)."""
     #get SqlAlchemy session
     self._init_session()
     _caches = CacheInvalidation.query().order_by(CacheInvalidation.cache_key).all()
     if self.options.show:
         # --show: print every key together with its active flag
         for c_obj in _caches:
             print 'key:%s active:%s' % (c_obj.cache_key, c_obj.cache_active)
     elif self.options.cleanup:
         # --cleanup: delete every invalidation key in a single transaction
         for c_obj in _caches:
             Session().delete(c_obj)
             print 'removing key:%s' % (c_obj.cache_key)
         Session().commit()
     else:
         print 'nothing done exiting...'
     sys.exit(0)
Example #18
0
 def command(self):
     """List or purge cache invalidation keys (paster command entry point)."""
     #get SqlAlchemy session
     self._init_session()
     _caches = CacheInvalidation.query().order_by(
         CacheInvalidation.cache_key).all()
     if self.options.show:
         # --show: print every key together with its active flag
         for c_obj in _caches:
             print 'key:%s active:%s' % (c_obj.cache_key,
                                         c_obj.cache_active)
     elif self.options.cleanup:
         # --cleanup: delete every invalidation key in a single transaction
         for c_obj in _caches:
             Session().delete(c_obj)
             print 'removing key:%s' % (c_obj.cache_key)
         Session().commit()
     else:
         print 'nothing done exiting...'
     sys.exit(0)
Example #19
0
    def __iter__(self):
        """Yield one summary dict per readable repository."""
        # pre-propagated cache_map to save executing select statements
        # for each repo
        cache_map = CacheInvalidation.get_cache_map()

        for dbr in self.db_repo_list:
            scmr = dbr.scm_instance_cached(cache_map)
            # check permission at this level
            allowed = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'get repo check')
            if not allowed:
                continue

            try:
                last_change = scmr.last_change
                tip = h.get_changeset_safe(scmr, 'tip')
            except Exception:
                log.error(
                    '%s this repository is present in database but it '
                    'cannot be created as an scm instance, org_exc:%s'
                    % (dbr.repo_name, traceback.format_exc())
                )
                continue

            name_lower = dbr.repo_name.lower()
            contact = dbr.user.full_contact
            yield {
                'name': dbr.repo_name,
                'name_sort': name_lower,
                'raw_name': name_lower,
                'description': dbr.description,
                'description_sort': dbr.description.lower(),
                'last_change': last_change,
                'last_change_sort': time.mktime(last_change.timetuple()),
                'tip': tip.raw_id,
                'tip_sort': tip.revision,
                'rev': tip.revision,
                'contact': contact,
                'contact_sort': contact,
                'owner_sort': contact,
                'repo_archives': list(scmr._get_archives()),
                'last_msg': tip.message,
                'author': tip.author,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
Example #20
0
    def __iter__(self):
        """Yield one summary dict per repository the user may read."""
        # pre-propagated cache_map to save executing select statements
        # for each repo
        cache_map = CacheInvalidation.get_cache_map()

        for dbr in self.db_repo_list:
            scmr = dbr.scm_instance_cached(cache_map)
            # check permission at this level
            if not HasRepoPermissionAny(*self.perm_set)(dbr.repo_name,
                                                        'get repo check'):
                continue

            try:
                last_change = scmr.last_change
                tip = h.get_changeset_safe(scmr, 'tip')
            except Exception:
                # a db row without a working scm backend: log and skip it
                log.error('%s this repository is present in database but it '
                          'cannot be created as an scm instance, org_exc:%s' %
                          (dbr.repo_name, traceback.format_exc()))
                continue

            # *_sort keys carry pre-computed values for table sorting
            tmp_d = {}
            tmp_d['name'] = dbr.repo_name
            tmp_d['name_sort'] = tmp_d['name'].lower()
            tmp_d['raw_name'] = tmp_d['name'].lower()
            tmp_d['description'] = dbr.description
            tmp_d['description_sort'] = tmp_d['description'].lower()
            tmp_d['last_change'] = last_change
            tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
            tmp_d['tip'] = tip.raw_id
            tmp_d['tip_sort'] = tip.revision
            tmp_d['rev'] = tip.revision
            tmp_d['contact'] = dbr.user.full_contact
            tmp_d['contact_sort'] = tmp_d['contact']
            tmp_d['owner_sort'] = tmp_d['contact']
            tmp_d['repo_archives'] = list(scmr._get_archives())
            tmp_d['last_msg'] = tip.message
            tmp_d['author'] = tip.author
            tmp_d['dbrepo'] = dbr.get_dict()
            tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
            yield tmp_d
Example #21
0
    def upgrade(self):
        """
        Upgrades given database schema to given revision following
        all needed steps, to perform the upgrade

        """

        from rhodecode.lib.dbmigrate.migrate.versioning import api
        from rhodecode.lib.dbmigrate.migrate.exceptions import \
            DatabaseNotControlledError

        if 'sqlite' in self.dburi:
            print (
               '********************** WARNING **********************\n'
               'Make sure your version of sqlite is at least 3.7.X.  \n'
               'Earlier versions are known to fail on some migrations\n'
               '*****************************************************\n'
            )
        # interactive confirmation - abort unless the user agrees
        upgrade = ask_ok('You are about to perform database upgrade, make '
                         'sure You backed up your database before. '
                         'Continue ? [y/n]')
        if not upgrade:
            sys.exit('No upgrade performed')

        repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
                             'rhodecode/lib/dbmigrate')
        db_uri = self.dburi

        try:
            curr_version = api.db_version(db_uri, repository_path)
            msg = ('Found current database under version'
                 ' control with version %s' % curr_version)

        except (RuntimeError, DatabaseNotControlledError):
            # legacy database: put it under version control at version 1
            curr_version = 1
            msg = ('Current database is not under version control. Setting'
                   ' as version %s' % curr_version)
            api.version_control(db_uri, repository_path, curr_version)

        notify(msg)

        if curr_version == __dbversion__:
            sys.exit('This database is already at the newest version')

        # clear cache keys
        log.info("Clearing cache keys now...")
        CacheInvalidation.clear_cache()

        upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
        notify('attempting to do database upgrade from '
               'version %s to version %s' % (curr_version, __dbversion__))

        # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
        _step = None
        for step in upgrade_steps:
            notify('performing upgrade step %s' % step)
            time.sleep(2)

            # schema migration first, then the matching data fixture
            api.upgrade(db_uri, repository_path, step)
            notify('schema upgrade for step %s completed' % (step,))

            fixture = 'step_%s' % step
            notify('performing fixture step %s' % fixture)
            getattr(UpgradeSteps(self), fixture)()
            self.sa.commit()
            notify('fixture %s completed' % (fixture,))
            _step = step

        notify('upgrade to version %s successful' % _step)
Example #22
0
def repo2db_mapper(initial_repo_list,
                   remove_obsolete=False,
                   install_git_hook=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also check for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :param install_git_hook: if this is True, also check and install githook
        for a repo if missing
    :return: tuple of (added repo names, removed repo names)
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    # `== True` is required for SQLAlchemy expression building
    user = sa.query(User).filter(User.admin == True).first()
    if user is None:
        raise Exception('Missing administrative account!')
    added = []

    # creation defaults, taken from the global repository settings
    defs = RhodeCodeSetting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        db_repo = rm.get_by_repo_name(name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now' % name)
            added.append(name)
            desc = (repo.description if repo.description != 'unknown' else
                    '%s repository' % name)

            new_repo = rm.create_repo(repo_name=name,
                                      repo_type=repo.alias,
                                      description=desc,
                                      repos_group=getattr(
                                          group, 'group_id', None),
                                      owner=user,
                                      just_db=True,
                                      enable_locking=enable_locking,
                                      enable_downloads=enable_downloads,
                                      enable_statistics=enable_statistics,
                                      private=private)
            # we added that repo just now, and make sure it has githook
            # installed
            if new_repo.repo_type == 'git':
                ScmModel().install_git_hook(new_repo.scm_instance)
            new_repo.update_changeset_cache()
        elif install_git_hook:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hook(db_repo.scm_instance)
        # during starting install all cache keys for all repositories in the
        # system, this will register all repos and multiple instances
        cache_key = CacheInvalidation._get_cache_key(name)
        log.debug("Creating invalidation cache key for %s: %s", name,
                  cache_key)
        CacheInvalidation.invalidate(name)

    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`" %
                          repo.repo_name)
                try:
                    sa.delete(repo)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()
    return added, removed
Example #23
0
    def upgrade(self):
        """
        Upgrades given database schema to given revision following
        all needed steps, to perform the upgrade

        """

        from rhodecode.lib.dbmigrate.migrate.versioning import api
        from rhodecode.lib.dbmigrate.migrate.exceptions import \
            DatabaseNotControlledError

        if 'sqlite' in self.dburi:
            print('********************** WARNING **********************\n'
                  'Make sure your version of sqlite is at least 3.7.X.  \n'
                  'Earlier versions are known to fail on some migrations\n'
                  '*****************************************************\n')
        # interactive confirmation - abort unless the user agrees
        upgrade = ask_ok('You are about to perform database upgrade, make '
                         'sure You backed up your database before. '
                         'Continue ? [y/n]')
        if not upgrade:
            sys.exit('No upgrade performed')

        repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))),
                             'rhodecode/lib/dbmigrate')
        db_uri = self.dburi

        try:
            curr_version = api.db_version(db_uri, repository_path)
            msg = ('Found current database under version'
                   ' control with version %s' % curr_version)

        except (RuntimeError, DatabaseNotControlledError):
            # legacy database: put it under version control at version 1
            curr_version = 1
            msg = ('Current database is not under version control. Setting'
                   ' as version %s' % curr_version)
            api.version_control(db_uri, repository_path, curr_version)

        notify(msg)

        if curr_version == __dbversion__:
            sys.exit('This database is already at the newest version')

        # clear cache keys
        log.info("Clearing cache keys now...")
        CacheInvalidation.clear_cache()

        upgrade_steps = range(curr_version + 1, __dbversion__ + 1)
        notify('attempting to do database upgrade from '
               'version %s to version %s' % (curr_version, __dbversion__))

        # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE
        _step = None
        for step in upgrade_steps:
            notify('performing upgrade step %s' % step)
            time.sleep(2)

            # schema migration first, then the matching data fixture
            api.upgrade(db_uri, repository_path, step)
            notify('schema upgrade for step %s completed' % (step, ))

            fixture = 'step_%s' % step
            notify('performing fixture step %s' % fixture)
            getattr(UpgradeSteps(self), fixture)()
            self.sa.commit()
            notify('fixture %s completed' % (fixture, ))
            _step = step

        notify('upgrade to version %s successful' % _step)
Example #24
0
def repo2db_mapper(initial_repo_list, remove_obsolete=False,
                   install_git_hook=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also check for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: list of repositories found by scanning methods
    :param remove_obsolete: check for obsolete entries in database
    :param install_git_hook: if this is True, also check and install githook
        for a repo if missing
    :return: tuple of (added repo names, removed repo names)
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.scm import ScmModel
    sa = meta.Session()
    rm = RepoModel()
    # `== True` is required for SQLAlchemy expression building; do not
    # rewrite it as `is True`
    user = sa.query(User).filter(User.admin == True).first()
    if user is None:
        raise Exception('Missing administrative account!')
    added = []

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        db_repo = rm.get_by_repo_name(name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)
            new_repo = rm.create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repos_group=getattr(group, 'group_id', None),
                owner=user,
                just_db=True
            )
            # we added that repo just now, and make sure it has githook
            # installed
            if new_repo.repo_type == 'git':
                ScmModel().install_git_hook(new_repo.scm_instance)
        elif install_git_hook:
            if db_repo.repo_type == 'git':
                ScmModel().install_git_hook(db_repo.scm_instance)
        # during starting install all cache keys for all repositories in the
        # system, this will register all repos and multiple instances
        key, _prefix, _org_key = CacheInvalidation._get_key(name)
        CacheInvalidation.invalidate(name)
        log.debug("Creating a cache key for %s instance_id=>`%s`",
                  name, _prefix or '-')

    sa.commit()
    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the
        # filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list:
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    sa.delete(repo)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

    return added, removed