Example #1
    def commit_change(self,
                      repo,
                      filename,
                      content,
                      message,
                      vcs_type,
                      parent=None,
                      newfile=False):
        repo = Repository.get_by_repo_name(repo)
        _cs = parent
        if not parent:
            _cs = EmptyChangeset(alias=vcs_type)

        if newfile:
            nodes = {filename: {'content': content}}
            cs = ScmModel().create_nodes(
                user=TEST_USER_ADMIN_LOGIN,
                repo=repo,
                message=message,
                nodes=nodes,
                parent_cs=_cs,
                author=TEST_USER_ADMIN_LOGIN,
            )
        else:
            cs = ScmModel().commit_change(repo=repo.scm_instance,
                                          repo_name=repo.repo_name,
                                          cs=parent,
                                          user=TEST_USER_ADMIN_LOGIN,
                                          author=TEST_USER_ADMIN_LOGIN,
                                          message=message,
                                          content=content,
                                          f_path=filename)
        return cs
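
A minimal call sketch for the helper above, as it might be used from a test method. The repository name, file name, and message are illustrative assumptions; only TEST_USER_ADMIN_LOGIN is taken from the example itself.

    # hypothetical usage inside a test; repo and file names are assumptions
    cs = self.commit_change(repo='vcs_test_hg',
                            filename='README.rst',
                            content='initial content\n',
                            message='add readme',
                            vcs_type='hg',
                            parent=None,
                            newfile=True)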
Example #2
    def __init__(self,
                 indexname=IDX_NAME,
                 index_location=None,
                 repo_location=None,
                 sa=None,
                 repo_list=None,
                 repo_update_list=None):
        self.indexname = indexname

        self.index_location = index_location
        if not index_location:
            raise Exception('You have to provide index location')

        self.repo_location = repo_location
        if not repo_location:
            raise Exception('You have to provide repositories location')

        self.repo_paths = ScmModel(sa).repo_scan(self.repo_location)

        #filter repo list
        if repo_list:
            #Fix non-ascii repo names to unicode
            repo_list = map(safe_unicode, repo_list)
            self.filtered_repo_paths = {}
            for repo_name, repo in self.repo_paths.items():
                if repo_name in repo_list:
                    self.filtered_repo_paths[repo_name] = repo

            self.repo_paths = self.filtered_repo_paths

        #filter update repo list
        self.filtered_repo_update_paths = {}
        if repo_update_list:
            self.filtered_repo_update_paths = {}
            for repo_name, repo in self.repo_paths.items():
                if repo_name in repo_update_list:
                    self.filtered_repo_update_paths[repo_name] = repo
            self.repo_paths = self.filtered_repo_update_paths

        self.initial = True
        if not os.path.isdir(self.index_location):
            os.makedirs(self.index_location)
            log.info('Cannot run incremental index since it does not '
                     'yet exist; running full build')
        elif not exists_in(self.index_location, IDX_NAME):
            log.info('Running full index build as the file content '
                     'index does not exist')
        elif not exists_in(self.index_location, CHGSET_IDX_NAME):
            log.info('Running full index build as the changeset '
                     'index does not exist')
        else:
            self.initial = False
Example #3
    def update(self, repo_name):
        """
        PUT /repos/repo_name: Update an existing item"""
        # Forms posted to this method should contain a hidden field:
        #    <input type="hidden" name="_method" value="PUT" />
        # Or using helpers:
        #    h.form(url('repo', repo_name=ID),
        #           method='put')
        # url('repo', repo_name=ID)
        c.repo_info = self._load_repo(repo_name)
        c.active = 'settings'
        c.repo_fields = RepositoryField.query()\
            .filter(RepositoryField.repository == c.repo_info).all()
        self.__load_defaults(c.repo_info)

        repo_model = RepoModel()
        changed_name = repo_name
        #override the choices with extracted revisions !
        choices, c.landing_revs = ScmModel().get_repo_landing_revs(repo_name)
        c.landing_revs_choices = choices
        repo = Repository.get_by_repo_name(repo_name)
        old_data = {
            'repo_name': repo_name,
            'repo_group': repo.group.get_dict() if repo.group else {},
            'repo_type': repo.repo_type,
        }
        _form = RepoForm(edit=True,
                         old_data=old_data,
                         repo_groups=c.repo_groups_choices,
                         landing_revs=c.landing_revs_choices)()

        try:
            form_result = _form.to_python(dict(request.POST))
            repo = repo_model.update(repo_name, **form_result)
            ScmModel().mark_for_invalidation(repo_name)
            h.flash(_('Repository %s updated successfully') % repo_name,
                    category='success')
            changed_name = repo.repo_name
            action_logger(self.authuser, 'admin_updated_repo', changed_name,
                          self.ip_addr, self.sa)
            Session().commit()
        except formencode.Invalid as errors:
            defaults = self.__load_data(repo_name)
            defaults.update(errors.value)
            return htmlfill.render(render('admin/repos/repo_edit.html'),
                                   defaults=defaults,
                                   errors=errors.error_dict or {},
                                   prefix_error=False,
                                   encoding="UTF-8",
                                   force_defaults=False)
Example #4
    def _invalidate_cache(self, repo_name):
        """
        Sets cache for this repository for invalidation on next access

        :param repo_name: full repo name, also a cache key
        """
        ScmModel().mark_for_invalidation(repo_name)
Example #5
    def edit_advanced_fork(self, repo_name):
        """
        Mark given repository as a fork of another

        :param repo_name:
        """
        try:
            fork_id = request.POST.get('id_fork_of')
            repo = ScmModel().mark_as_fork(repo_name, fork_id,
                                           request.authuser.username)
            fork = repo.fork.repo_name if repo.fork else _('Nothing')
            Session().commit()
            h.flash(_('Marked repository %s as fork of %s') %
                    (repo_name, fork),
                    category='success')
        except RepositoryError as e:
            log.error(traceback.format_exc())
            h.flash(e, category='error')
        except Exception as e:
            log.error(traceback.format_exc())
            h.flash(_('An error occurred during this operation'),
                    category='error')

        raise HTTPFound(
            location=url('edit_repo_advanced', repo_name=repo_name))
Example #6
    def test_index_trending_git(self):
        self.log_user()
        # codes stats
        self._enable_stats(base.GIT_REPO)

        ScmModel().mark_for_invalidation(base.GIT_REPO)
        # generate statistics first
        response = self.app.get(
            base.url(controller='summary',
                     action='statistics',
                     repo_name=base.GIT_REPO))
        response = self.app.get(
            base.url(controller='summary',
                     action='index',
                     repo_name=base.GIT_REPO))
        response.mustcontain(
            '[["py", {"count": 68, "desc": ["Python"]}], '
            '["rst", {"count": 16, "desc": ["Rst"]}], '
            '["css", {"count": 2, "desc": ["Css"]}], '
            '["sh", {"count": 2, "desc": ["Bash"]}], '
            '["bat", {"count": 1, "desc": ["Batch"]}], '
            '["cfg", {"count": 1, "desc": ["Ini"]}], '
            '["html", {"count": 1, "desc": ["EvoqueHtml", "Html"]}], '
            '["ini", {"count": 1, "desc": ["Ini"]}], '
            '["js", {"count": 1, "desc": ["Javascript"]}], '
            '["makefile", {"count": 1, "desc": ["Makefile", "Makefile"]}]]', )
Example #7
    def test_index_statistics_git(self):
        self.log_user()
        #codes stats
        self._enable_stats(GIT_REPO)

        ScmModel().mark_for_invalidation(GIT_REPO)
        response = self.app.get(
            url(controller='summary', action='statistics', repo_name=GIT_REPO))
Example #8
 def nodelist(self, repo_name, revision, f_path):
     if request.environ.get('HTTP_X_PARTIAL_XHR'):
         cs = self.__get_cs(revision)
         _d, _f = ScmModel().get_nodes(repo_name,
                                       cs.raw_id,
                                       f_path,
                                       flat=False)
         return {'nodes': _d + _f}
Example #9
    def settings_mapping(self):
        c.active = 'mapping'
        if request.POST:
            rm_obsolete = request.POST.get('destroy', False)
            install_git_hooks = request.POST.get('hooks', False)
            overwrite_git_hooks = request.POST.get('hooks_overwrite', False)
            invalidate_cache = request.POST.get('invalidate', False)
            log.debug('rescanning repo location with destroy obsolete=%s, '
                      'install git hooks=%s and '
                      'overwrite git hooks=%s' % (rm_obsolete, install_git_hooks, overwrite_git_hooks))

            filesystem_repos = ScmModel().repo_scan()
            added, removed = repo2db_mapper(filesystem_repos, rm_obsolete,
                                            install_git_hooks=install_git_hooks,
                                            user=request.authuser.username,
                                            overwrite_git_hooks=overwrite_git_hooks)
            added_msg = h.HTML(', ').join(
                h.link_to(safe_str(repo_name), h.url('summary_home', repo_name=repo_name)) for repo_name in added
            ) or '-'
            removed_msg = h.HTML(', ').join(
                safe_str(repo_name) for repo_name in removed
            ) or '-'
            h.flash(h.HTML(_('Repositories successfully rescanned. Added: %s. Removed: %s.')) %
                    (added_msg, removed_msg), category='success')

            if invalidate_cache:
                log.debug('invalidating all repositories cache')
                i = 0
                for repo in Repository.query():
                    try:
                        ScmModel().mark_for_invalidation(repo.repo_name)
                        i += 1
                    except VCSError as e:
                        log.warning('VCS error invalidating %s: %s', repo.repo_name, e)
                h.flash(_('Invalidated %s repositories') % i, category='success')

            raise HTTPFound(location=url('admin_settings_mapping'))

        defaults = Setting.get_app_settings()
        defaults.update(self._get_hg_ui_settings())

        return htmlfill.render(
            render('admin/settings/settings.html'),
            defaults=defaults,
            encoding="UTF-8",
            force_defaults=False)
Example #10
    def _before(self, *args, **kwargs):
        """
        _before is called before controller methods and after __call__
        """
        c.kallithea_version = __version__
        rc_config = Setting.get_app_settings()

        # Visual options
        c.visual = AttributeDict({})

        ## DB stored
        c.visual.show_public_icon = str2bool(rc_config.get('show_public_icon'))
        c.visual.show_private_icon = str2bool(rc_config.get('show_private_icon'))
        c.visual.stylify_metatags = str2bool(rc_config.get('stylify_metatags'))
        c.visual.page_size = safe_int(rc_config.get('dashboard_items', 100))
        c.visual.admin_grid_items = safe_int(rc_config.get('admin_grid_items', 100))
        c.visual.repository_fields = str2bool(rc_config.get('repository_fields'))
        c.visual.show_version = str2bool(rc_config.get('show_version'))
        c.visual.use_gravatar = str2bool(rc_config.get('use_gravatar'))
        c.visual.gravatar_url = rc_config.get('gravatar_url')

        c.ga_code = rc_config.get('ga_code')
        # TODO: replace undocumented backwards compatibility hack with db upgrade and rename ga_code
        if c.ga_code and '<' not in c.ga_code:
            c.ga_code = '''<script type="text/javascript">
                var _gaq = _gaq || [];
                _gaq.push(['_setAccount', '%s']);
                _gaq.push(['_trackPageview']);

                (function() {
                    var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
                    ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
                    var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
                    })();
            </script>''' % c.ga_code
        c.site_name = rc_config.get('title')
        c.clone_uri_tmpl = rc_config.get('clone_uri_tmpl')

        ## INI stored
        c.visual.allow_repo_location_change = str2bool(config.get('allow_repo_location_change', True))
        c.visual.allow_custom_hooks_settings = str2bool(config.get('allow_custom_hooks_settings', True))

        c.instance_id = config.get('instance_id')
        c.issues_url = config.get('bugtracker', url('issues_url'))
        # END CONFIG VARS

        c.repo_name = get_repo_slug(request)  # can be empty
        c.backends = BACKENDS.keys()
        c.unread_notifications = NotificationModel() \
                        .get_unread_cnt_for_user(request.authuser.user_id)

        self.cut_off_limit = safe_int(config.get('cut_off_limit'))

        c.my_pr_count = PullRequest.query(reviewer_id=request.authuser.user_id, include_closed=False).count()

        self.scm_model = ScmModel()
Example #11
 def command(self):
     #get SqlAlchemy session
     self._init_session()
     rm_obsolete = self.options.delete_obsolete
     log.info('Now scanning root location for new repos...')
     added, removed = repo2db_mapper(ScmModel().repo_scan(),
                                     remove_obsolete=rm_obsolete)
     added = ', '.join(added) or '-'
     removed = ', '.join(removed) or '-'
     log.info('Scan completed. Added: %s, removed: %s' % (added, removed))
Example #12
    def update(self, gist, description, owner, gist_mapping, gist_type,
               lifetime):
        gist = self._get_gist(gist)
        gist_repo = gist.scm_instance

        lifetime = safe_int(lifetime, -1)
        if lifetime == 0:  # preserve old value
            gist_expires = gist.gist_expires
        else:
            gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1

        #calculate operation type based on given data
        gist_mapping_op = {}
        for k, v in gist_mapping.items():
            # add, mod, del
            if not v['org_filename'] and v['filename']:
                op = 'add'
            elif v['org_filename'] and not v['filename']:
                op = 'del'
            else:
                op = 'mod'

            v['op'] = op
            gist_mapping_op[k] = v

        gist.gist_description = description
        gist.gist_expires = gist_expires
        gist.owner = owner
        gist.gist_type = gist_type
        self.sa.add(gist)
        self.sa.flush()

        message = 'updated file'
        message += 's: ' if len(gist_mapping) > 1 else ': '
        message += ', '.join([x for x in gist_mapping])

        #fake Kallithea Repository object
        fake_repo = AttributeDict(dict(
            repo_name=gist_repo.path,
            scm_instance_no_cache=lambda: gist_repo,
        ))

        self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
                             owner.user_id, gist.gist_type, gist.gist_expires)

        ScmModel().update_nodes(
            user=owner.user_id,
            repo=fake_repo,
            message=message,
            nodes=gist_mapping_op,
            trigger_push_hook=False
        )

        return gist
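
The shape of the gist_mapping argument can be inferred from the loop above: each value must carry 'org_filename' and 'filename' keys, from which update() derives the operation. A minimal illustration (any further keys, such as the new file content, are omitted here):

    # illustrative input for the gist update; file names are assumptions
    gist_mapping = {
        u'notes.txt': {'org_filename': u'', 'filename': u'notes.txt'},          # -> op 'add'
        u'old.txt':   {'org_filename': u'old.txt', 'filename': u''},            # -> op 'del'
        u'todo.txt':  {'org_filename': u'todo.txt', 'filename': u'todo.txt'},   # -> op 'mod'
    }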
Example #13
    def __load_defaults(self):
        if HasPermissionAny('hg.create.write_on_repogroup.true')():
            repo_group_perm_level = 'write'
        else:
            repo_group_perm_level = 'admin'
        c.repo_groups = AvailableRepoGroupChoices(['hg.create.repository'],
                                                  repo_group_perm_level)

        c.landing_revs_choices, c.landing_revs = ScmModel().get_repo_landing_revs()

        c.can_update = Ui.get_by_key('hooks', Ui.HOOK_UPDATE).ui_active
Example #14
 def take_action(self, args):
     rm_obsolete = args.delete_obsolete
     print('Now scanning root location for new repos ...')
     added, removed = repo2db_mapper(ScmModel().repo_scan(),
                                     remove_obsolete=rm_obsolete)
     added = ', '.join(added) or '-'
     removed = ', '.join(removed) or '-'
     print('Scan completed.')
     print('Added: %s' % added)
     if rm_obsolete:
         print('Removed: %s' % removed)
     else:
         print('Missing: %s' % removed)
Example #15
    def test_hooks_created_if_missing(self):
        """
        Tests if hooks are installed in repository if they are missing.
        """

        for hook, hook_path in self.kallithea_hooks.items():
            if os.path.exists(hook_path):
                os.remove(hook_path)

        ScmModel().install_git_hooks(repo=self.repo)

        for hook, hook_path in self.kallithea_hooks.items():
            assert os.path.exists(hook_path)
Example #16
    def __load_defaults(self, repo=None):
        top_perms = ['hg.create.repository']
        if HasPermissionAny('hg.create.write_on_repogroup.true')():
            repo_group_perm_level = 'write'
        else:
            repo_group_perm_level = 'admin'
        extras = [] if repo is None else [repo.group]

        c.repo_groups = AvailableRepoGroupChoices(top_perms,
                                                  repo_group_perm_level,
                                                  extras)

        c.landing_revs_choices, c.landing_revs = ScmModel().get_repo_landing_revs(repo)
Example #17
    def test_custom_hooks_forced_update(self):
        """
        Tests if hooks are forcefully updated even though they are custom hooks.
        """

        for hook, hook_path in self.kallithea_hooks.items():
            with open(hook_path, "w") as f:
                f.write("#!/bin/bash\n#CUSTOM_HOOK")

        ScmModel().install_git_hooks(repo=self.repo, force_create=True)

        for hook, hook_path in self.kallithea_hooks.items():
            with open(hook_path) as f:
                assert "KALLITHEA_HOOK_VER" in f.read()
Example #18
    def test_kallithea_hooks_updated(self):
        """
        Tests if hooks are updated if they are Kallithea hooks already.
        """

        for hook, hook_path in self.kallithea_hooks.items():
            with open(hook_path, "w") as f:
                f.write("KALLITHEA_HOOK_VER=0.0.0\nJUST_BOGUS")

        ScmModel().install_git_hooks(repo=self.repo)

        for hook, hook_path in self.kallithea_hooks.items():
            with open(hook_path) as f:
                assert "JUST_BOGUS" not in f.read()
Example #19
    def test_custom_hooks_untouched(self):
        """
        Tests if hooks are left untouched if they are not Kallithea hooks.
        """

        for hook, hook_path in self.kallithea_hooks.items():
            with open(hook_path, "w") as f:
                f.write("#!/bin/bash\n#CUSTOM_HOOK")

        ScmModel().install_git_hooks(repo=self.repo)

        for hook, hook_path in self.kallithea_hooks.items():
            with open(hook_path) as f:
                assert "CUSTOM_HOOK" in f.read()
Example #20
 def edit_remote(self, repo_name):
     c.repo_info = self._load_repo()
     c.active = 'remote'
     if request.POST:
         try:
             ScmModel().pull_changes(repo_name, request.authuser.username)
             h.flash(_('Pulled from remote location'), category='success')
         except Exception as e:
             log.error(traceback.format_exc())
             h.flash(
                 _('An error occurred during pull from remote location'),
                 category='error')
         raise HTTPFound(
             location=url('edit_repo_remote', repo_name=c.repo_name))
     return render('admin/repos/repo_edit.html')
Example #21
 def edit_remote(self, repo_name):
     """GET /repo_name/settings: Form to edit an existing item"""
     # url('edit_repo', repo_name=ID)
     c.repo_info = self._load_repo(repo_name)
     c.active = 'remote'
     if request.POST:
         try:
             ScmModel().pull_changes(repo_name, self.authuser.username)
             h.flash(_('Pulled from remote location'), category='success')
         except Exception as e:
             log.error(traceback.format_exc())
             h.flash(
                 _('An error occurred during pull from remote location'),
                 category='error')
         return redirect(url('edit_repo_remote', repo_name=c.repo_name))
Example #22
    def __load_data(self, repo_name=None):
        """
        Load defaults settings for edit, and update

        :param repo_name:
        """
        c.repo_info = self._load_repo(repo_name)
        self.__load_defaults(c.repo_info)

        ##override defaults for exact repo info here git/hg etc
        choices, c.landing_revs = ScmModel().get_repo_landing_revs(c.repo_info)
        c.landing_revs_choices = choices
        defaults = RepoModel()._get_defaults(repo_name)

        return defaults
Example #23
    def __init__(self, indexname=IDX_NAME, index_location=None,
                 repo_location=None, sa=None, repo_list=None,
                 repo_update_list=None):
        self.indexname = indexname

        self.index_location = index_location
        if not index_location:
            raise Exception('You have to provide index location')

        self.repo_location = repo_location
        if not repo_location:
            raise Exception('You have to provide repositories location')

        self.repo_paths = ScmModel(sa).repo_scan(self.repo_location)

        #filter repo list
        if repo_list:
            #Fix non-ascii repo names to unicode
            repo_list = map(safe_unicode, repo_list)
            self.filtered_repo_paths = {}
            for repo_name, repo in self.repo_paths.items():
                if repo_name in repo_list:
                    self.filtered_repo_paths[repo_name] = repo

            self.repo_paths = self.filtered_repo_paths

        #filter update repo list
        self.filtered_repo_update_paths = {}
        if repo_update_list:
            self.filtered_repo_update_paths = {}
            for repo_name, repo in self.repo_paths.items():
                if repo_name in repo_update_list:
                    self.filtered_repo_update_paths[repo_name] = repo
            self.repo_paths = self.filtered_repo_update_paths

        self.initial = True
        if not os.path.isdir(self.index_location):
            os.makedirs(self.index_location)
            log.info('Cannot run incremental index since it does not '
                     'yet exist; running full build')
        elif not exists_in(self.index_location, IDX_NAME):
            log.info('Running full index build as the file content '
                     'index does not exist')
        elif not exists_in(self.index_location, CHGSET_IDX_NAME):
            log.info('Running full index build as the changeset '
                     'index does not exist')
        else:
            self.initial = False
Example #24
    def edit_caches(self, repo_name):
        """GET /repo_name/settings: Form to edit an existing item"""
        # url('edit_repo', repo_name=ID)
        c.repo_info = self._load_repo(repo_name)
        c.active = 'caches'
        if request.POST:
            try:
                ScmModel().mark_for_invalidation(repo_name, delete=True)
                Session().commit()
                h.flash(_('Cache invalidation successful'), category='success')
            except Exception as e:
                log.error(traceback.format_exc())
                h.flash(_('An error occurred during cache invalidation'),
                        category='error')

            return redirect(url('edit_repo_caches', repo_name=c.repo_name))
Example #25
    def edit_caches(self, repo_name):
        c.repo_info = self._load_repo()
        c.active = 'caches'
        if request.POST:
            try:
                ScmModel().mark_for_invalidation(repo_name)
                Session().commit()
                h.flash(_('Cache invalidation successful'), category='success')
            except Exception as e:
                log.error(traceback.format_exc())
                h.flash(_('An error occurred during cache invalidation'),
                        category='error')

            raise HTTPFound(
                location=url('edit_repo_caches', repo_name=c.repo_name))
        return render('admin/repos/repo_edit.html')
Example #26
    def update(self, repo_name):
        c.repo_info = self._load_repo()
        self.__load_defaults(c.repo_info)
        c.active = 'settings'
        c.repo_fields = RepositoryField.query() \
            .filter(RepositoryField.repository == c.repo_info).all()

        repo_model = RepoModel()
        changed_name = repo_name
        repo = Repository.get_by_repo_name(repo_name)
        old_data = {
            'repo_name': repo_name,
            'repo_group': repo.group.get_dict() if repo.group else {},
            'repo_type': repo.repo_type,
        }
        _form = RepoForm(edit=True,
                         old_data=old_data,
                         repo_groups=c.repo_groups,
                         landing_revs=c.landing_revs_choices)()

        try:
            form_result = _form.to_python(dict(request.POST))
            repo = repo_model.update(repo_name, **form_result)
            ScmModel().mark_for_invalidation(repo_name)
            h.flash(_('Repository %s updated successfully') % repo_name,
                    category='success')
            changed_name = repo.repo_name
            action_logger(request.authuser, 'admin_updated_repo', changed_name,
                          request.ip_addr)
            Session().commit()
        except formencode.Invalid as errors:
            log.info(errors)
            defaults = self.__load_data()
            defaults.update(errors.value)
            return htmlfill.render(render('admin/repos/repo_edit.html'),
                                   defaults=defaults,
                                   errors=errors.error_dict or {},
                                   prefix_error=False,
                                   encoding="UTF-8",
                                   force_defaults=False)

        except Exception:
            log.error(traceback.format_exc())
            h.flash(_('Error occurred during update of repository %s') %
                    repo_name,
                    category='error')
        raise HTTPFound(location=url('edit_repo', repo_name=changed_name))
Example #27
    def delete(self, repo_name):
        """
        DELETE /repos/repo_name: Delete an existing item"""
        # Forms posted to this method should contain a hidden field:
        #    <input type="hidden" name="_method" value="DELETE" />
        # Or using helpers:
        #    h.form(url('repo', repo_name=ID),
        #           method='delete')
        # url('repo', repo_name=ID)

        repo_model = RepoModel()
        repo = repo_model.get_by_repo_name(repo_name)
        if not repo:
            h.not_mapped_error(repo_name)
            return redirect(url('repos'))
        try:
            _forks = repo.forks.count()
            handle_forks = None
            if _forks and request.POST.get('forks'):
                do = request.POST['forks']
                if do == 'detach_forks':
                    handle_forks = 'detach'
                    h.flash(_('Detached %s forks') % _forks,
                            category='success')
                elif do == 'delete_forks':
                    handle_forks = 'delete'
                    h.flash(_('Deleted %s forks') % _forks, category='success')
            repo_model.delete(repo, forks=handle_forks)
            action_logger(self.authuser, 'admin_deleted_repo', repo_name,
                          self.ip_addr, self.sa)
            ScmModel().mark_for_invalidation(repo_name)
            h.flash(_('Deleted repository %s') % repo_name, category='success')
            Session().commit()
        except AttachedForksError:
            h.flash(_('Cannot delete %s it still contains attached forks') %
                    repo_name,
                    category='warning')

        except Exception:
            log.error(traceback.format_exc())
            h.flash(_('An error occurred during deletion of %s') % repo_name,
                    category='error')

        if repo.group:
            return redirect(
                url('repos_group_home', group_name=repo.group.group_name))
        return redirect(url('repos'))
Example #28
    def edit_advanced_fork(self, repo_name):
        """
        Mark given repository as a fork of another

        :param repo_name:
        """
        try:
            fork_id = request.POST.get('id_fork_of')
            repo = ScmModel().mark_as_fork(repo_name, fork_id,
                                           self.authuser.username)
            fork = repo.fork.repo_name if repo.fork else _('Nothing')
            Session().commit()
            h.flash(_('Marked repo %s as fork of %s') % (repo_name, fork),
                    category='success')
        except RepositoryError as e:
            log.error(traceback.format_exc())
            h.flash(str(e), category='error')
Example #29
    def create_repo(self, name, repo_group=None, **kwargs):
        if 'skip_if_exists' in kwargs:
            del kwargs['skip_if_exists']
            r = Repository.get_by_repo_name(name)
            if r:
                return r

        if isinstance(repo_group, RepoGroup):
            repo_group = repo_group.group_id

        form_data = self._get_repo_create_params(repo_name=name, **kwargs)
        form_data['repo_group'] = repo_group  # patch form dict so it can be used directly by model
        cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
        RepoModel().create(form_data, cur_user)
        Session().commit()
        ScmModel().mark_for_invalidation(name)
        return Repository.get_by_repo_name(name)
Example #30
def repo_scan(remove_missing):
    """Scan filesystem for repositories.

    Search the configured repository root for new repositories and add them
    into Kallithea.
    Additionally, report repositories that were previously known to Kallithea
    but are no longer present on the filesystem. If option --remove-missing is
    given, remove the missing repositories from the Kallithea database.
    """
    click.echo('Now scanning root location for new repos ...')
    added, removed = repo2db_mapper(ScmModel().repo_scan(),
                                    remove_obsolete=remove_missing)
    click.echo('Scan completed.')
    if added:
        click.echo('Added: %s' % ', '.join(added))
    if removed:
        click.echo(
            '%s: %s' %
            ('Removed' if remove_missing else 'Missing', ', '.join(removed)))
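
The docstring mentions a --remove-missing option, which suggests repo_scan is registered as a click command. A plausible wiring, not taken from the source, would look like this:

    # hypothetical registration; the actual decorators are not shown in the example
    @click.command()
    @click.option('--remove-missing', is_flag=True,
                  help='Remove repositories that are gone from the filesystem.')
    def repo_scan(remove_missing):
        ...  # body as in the example above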
Example #31
    def create_fork(self, repo_to_fork, fork_name, **kwargs):
        repo_to_fork = Repository.get_by_repo_name(repo_to_fork)

        form_data = self._get_repo_create_params(
            repo_name=fork_name,
            fork_parent_id=repo_to_fork,
            repo_type=repo_to_fork.repo_type,
            **kwargs)
        # patch form dict so it can be used directly by model
        form_data['description'] = form_data['repo_description']
        form_data['private'] = form_data['repo_private']
        form_data['landing_rev'] = form_data['repo_landing_rev']

        owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
        RepoModel().create_fork(form_data, cur_user=owner)
        Session().commit()
        ScmModel().mark_for_invalidation(fork_name)
        r = Repository.get_by_repo_name(fork_name)
        assert r
        return r
Example #32
class WhooshIndexingDaemon(object):
    """
    Daemon for atomic indexing jobs
    """

    def __init__(self, indexname=IDX_NAME, index_location=None,
                 repo_location=None, sa=None, repo_list=None,
                 repo_update_list=None):
        self.indexname = indexname

        self.index_location = index_location
        if not index_location:
            raise Exception('You have to provide index location')

        self.repo_location = repo_location
        if not repo_location:
            raise Exception('You have to provide repositories location')

        self.repo_paths = ScmModel(sa).repo_scan(self.repo_location)

        #filter repo list
        if repo_list:
            #Fix non-ascii repo names to unicode
            repo_list = map(safe_unicode, repo_list)
            self.filtered_repo_paths = {}
            for repo_name, repo in self.repo_paths.items():
                if repo_name in repo_list:
                    self.filtered_repo_paths[repo_name] = repo

            self.repo_paths = self.filtered_repo_paths

        #filter update repo list
        self.filtered_repo_update_paths = {}
        if repo_update_list:
            self.filtered_repo_update_paths = {}
            for repo_name, repo in self.repo_paths.items():
                if repo_name in repo_update_list:
                    self.filtered_repo_update_paths[repo_name] = repo
            self.repo_paths = self.filtered_repo_update_paths

        self.initial = True
        if not os.path.isdir(self.index_location):
            os.makedirs(self.index_location)
            log.info('Cannot run incremental index since it does not '
                     'yet exist; running full build')
        elif not exists_in(self.index_location, IDX_NAME):
            log.info('Running full index build as the file content '
                     'index does not exist')
        elif not exists_in(self.index_location, CHGSET_IDX_NAME):
            log.info('Running full index build as the changeset '
                     'index does not exist')
        else:
            self.initial = False

    def _get_index_revision(self, repo):
        db_repo = Repository.get_by_repo_name(repo.name_unicode)
        landing_rev = 'tip'
        if db_repo:
            _rev_type, _rev = db_repo.landing_rev
            landing_rev = _rev
        return landing_rev

    def _get_index_changeset(self, repo, index_rev=None):
        if not index_rev:
            index_rev = self._get_index_revision(repo)
        cs = repo.get_changeset(index_rev)
        return cs

    def get_paths(self, repo):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function
        """
        index_paths_ = set()
        try:
            cs = self._get_index_changeset(repo)
            for _topnode, _dirs, files in cs.walk('/'):
                for f in files:
                    index_paths_.add(jn(safe_str(repo.path), safe_str(f.path)))

        except RepositoryError:
            log.debug(traceback.format_exc())
            pass
        return index_paths_

    def get_node(self, repo, path, index_rev=None):
        """
        gets a filenode based on given full path. It operates on string for
        hg git compatibility.

        :param repo: scm repo instance
        :param path: full path including root location
        :return: FileNode
        """
        # FIXME: paths should be normalized ... or even better: don't include repo.path
        path = safe_str(path)
        repo_path = safe_str(repo.path)
        assert path.startswith(repo_path)
        assert path[len(repo_path)] in (os.path.sep, os.path.altsep)
        node_path = path[len(repo_path) + 1:]
        cs = self._get_index_changeset(repo, index_rev=index_rev)
        node = cs.get_node(node_path)
        return node

    def is_indexable_node(self, node):
        """
        Just index the content of chosen files, skipping binary files
        """
        return (node.extension in INDEX_EXTENSIONS or node.name in INDEX_FILENAMES) and \
               not node.is_binary

    def get_node_mtime(self, node):
        return mktime(node.last_changeset.date.timetuple())

    def add_doc(self, writer, path, repo, repo_name, index_rev=None):
        """
        Adding doc to writer this function itself fetches data from
        the instance of vcs backend
        """
        try:
            node = self.get_node(repo, path, index_rev)
        except (ChangesetError, NodeDoesNotExistError):
            log.debug("couldn't add doc - %s did not have %r at %s", repo, path, index_rev)
            return 0, 0

        indexed = indexed_w_content = 0
        if self.is_indexable_node(node):
            u_content = node.content
            if not isinstance(u_content, unicode):
                log.warning('  >> %s Could not get this content as unicode, '
                            'replacing with empty content' % path)
                u_content = u''
            else:
                log.debug('    >> %s [WITH CONTENT]', path)
                indexed_w_content += 1

        else:
            log.debug('    >> %s', path)
            # just index file name without its content
            u_content = u''
            indexed += 1

        p = safe_unicode(path)
        writer.add_document(
            fileid=p,
            owner=unicode(repo.contact),
            repository=safe_unicode(repo_name),
            path=p,
            content=u_content,
            modtime=self.get_node_mtime(node),
            extension=node.extension
        )
        return indexed, indexed_w_content

    def index_changesets(self, writer, repo_name, repo, start_rev=None):
        """
        Add all changeset in the vcs repo starting at start_rev
        to the index writer

        :param writer: the whoosh index writer to add to
        :param repo_name: name of the repository from whence the
          changeset originates including the repository group
        :param repo: the vcs repository instance to index changesets for,
          the presumption is the repo has changesets to index
        :param start_rev=None: the full sha id to start indexing from
          if start_rev is None then index from the first changeset in
          the repo
        """

        if start_rev is None:
            start_rev = repo[0].raw_id

        log.debug('indexing changesets in %s starting at rev: %s',
                  repo_name, start_rev)

        indexed = 0
        cs_iter = repo.get_changesets(start=start_rev)
        total = len(cs_iter)
        for cs in cs_iter:
            log.debug('    >> %s/%s', cs, total)
            writer.add_document(
                raw_id=unicode(cs.raw_id),
                owner=unicode(repo.contact),
                date=cs._timestamp,
                repository=safe_unicode(repo_name),
                author=cs.author,
                message=cs.message,
                last=cs.last,
                added=u' '.join([safe_unicode(node.path) for node in cs.added]).lower(),
                removed=u' '.join([safe_unicode(node.path) for node in cs.removed]).lower(),
                changed=u' '.join([safe_unicode(node.path) for node in cs.changed]).lower(),
                parents=u' '.join([cs.raw_id for cs in cs.parents]),
            )
            indexed += 1

        log.debug('indexed %d changesets for repo %s', indexed, repo_name)
        return indexed

    def index_files(self, file_idx_writer, repo_name, repo):
        """
        Index files for given repo_name

        :param file_idx_writer: the whoosh index writer to add to
        :param repo_name: name of the repository we're indexing
        :param repo: instance of vcs repo
        """
        i_cnt = iwc_cnt = 0
        log.debug('building index for %s @revision:%s', repo.path,
                                                self._get_index_revision(repo))
        index_rev = self._get_index_revision(repo)
        for idx_path in self.get_paths(repo):
            i, iwc = self.add_doc(file_idx_writer, idx_path, repo, repo_name, index_rev)
            i_cnt += i
            iwc_cnt += iwc

        log.debug('added %s files %s with content for repo %s',
                  i_cnt + iwc_cnt, iwc_cnt, repo.path)
        return i_cnt, iwc_cnt

    def update_changeset_index(self):
        idx = open_dir(self.index_location, indexname=CHGSET_IDX_NAME)

        with idx.searcher() as searcher:
            writer = idx.writer()
            writer_is_dirty = False
            try:
                indexed_total = 0
                repo_name = None
                for repo_name, repo in self.repo_paths.items():
                    # skip indexing if there aren't any revs in the repo
                    num_of_revs = len(repo)
                    if num_of_revs < 1:
                        continue

                    qp = QueryParser('repository', schema=CHGSETS_SCHEMA)
                    q = qp.parse(u"last:t AND %s" % repo_name)

                    results = searcher.search(q)

                    # default to scanning the entire repo
                    last_rev = 0
                    start_id = None

                    if len(results) > 0:
                        # assuming that there is only one result, if not this
                        # may require a full re-index.
                        start_id = results[0]['raw_id']
                        last_rev = repo.get_changeset(revision=start_id).revision

                    # there are new changesets to index or a new repo to index
                    if last_rev == 0 or num_of_revs > last_rev + 1:
                        # delete the docs in the index for the previous
                        # last changeset(s)
                        for hit in results:
                            q = qp.parse(u"last:t AND %s AND raw_id:%s" %
                                            (repo_name, hit['raw_id']))
                            writer.delete_by_query(q)

                        # index from the previous last changeset + all new ones
                        indexed_total += self.index_changesets(writer,
                                                repo_name, repo, start_id)
                        writer_is_dirty = True
                log.debug('indexed %s changesets for repo %s',
                          indexed_total, repo_name
                )
            finally:
                if writer_is_dirty:
                    log.debug('>> COMMITTING CHANGES TO CHANGESET INDEX <<')
                    writer.commit(merge=True)
                    log.debug('>>> FINISHED REBUILDING CHANGESET INDEX <<<')
                else:
                    log.debug('>> NOTHING TO COMMIT TO CHANGESET INDEX<<')

    def update_file_index(self):
        log.debug((u'STARTING INCREMENTAL INDEXING UPDATE FOR EXTENSIONS %s '
                   'AND REPOS %s') % (INDEX_EXTENSIONS, self.repo_paths.keys()))

        idx = open_dir(self.index_location, indexname=self.indexname)
        # The set of all paths in the index
        indexed_paths = set()
        # The set of all paths we need to re-index
        to_index = set()

        writer = idx.writer()
        writer_is_dirty = False
        try:
            with idx.reader() as reader:

                # Loop over the stored fields in the index
                for fields in reader.all_stored_fields():
                    indexed_path = fields['path']
                    indexed_repo_path = fields['repository']
                    indexed_paths.add(indexed_path)

                    if indexed_repo_path not in self.filtered_repo_update_paths:
                        continue

                    repo = self.repo_paths[indexed_repo_path]

                    try:
                        node = self.get_node(repo, indexed_path)
                        # Check if this file was changed since it was indexed
                        indexed_time = fields['modtime']
                        mtime = self.get_node_mtime(node)
                        if mtime > indexed_time:
                            # The file has changed, delete it and add it to
                            # the list of files to reindex
                            log.debug(
                                'adding to reindex list %s mtime: %s vs %s',
                                    indexed_path, mtime, indexed_time
                            )
                            writer.delete_by_term('fileid', indexed_path)
                            writer_is_dirty = True

                            to_index.add(indexed_path)
                    except (ChangesetError, NodeDoesNotExistError):
                        # This file was deleted since it was indexed
                        log.debug('removing from index %s', indexed_path)
                        writer.delete_by_term('path', indexed_path)
                        writer_is_dirty = True

            # Loop over the files in the filesystem
            # Assume we have a function that gathers the filenames of the
            # documents to be indexed
            ri_cnt_total = 0  # indexed
            riwc_cnt_total = 0  # indexed with content
            for repo_name, repo in self.repo_paths.items():
                # skip indexing if there aren't any revisions
                if len(repo) < 1:
                    continue
                ri_cnt = 0   # indexed
                riwc_cnt = 0  # indexed with content
                for path in self.get_paths(repo):
                    path = safe_unicode(path)
                    if path in to_index or path not in indexed_paths:

                        # This is either a file that's changed, or a new file
                        # that wasn't indexed before. So index it!
                        i, iwc = self.add_doc(writer, path, repo, repo_name)
                        writer_is_dirty = True
                        log.debug('re-indexing %s', path)
                        ri_cnt += i
                        ri_cnt_total += 1
                        riwc_cnt += iwc
                        riwc_cnt_total += iwc
                log.debug('added %s files %s with content for repo %s',
                             ri_cnt + riwc_cnt, riwc_cnt, repo.path
                )
            log.debug('indexed %s files in total and %s with content',
                        ri_cnt_total, riwc_cnt_total
            )
        finally:
            if writer_is_dirty:
                log.debug('>> COMMITTING CHANGES TO FILE INDEX <<')
                writer.commit(merge=True)
                log.debug('>>> FINISHED REBUILDING FILE INDEX <<<')
            else:
                log.debug('>> NOTHING TO COMMIT TO FILE INDEX <<')
                writer.cancel()

    def build_indexes(self):
        if os.path.exists(self.index_location):
            log.debug('removing previous index')
            rmtree(self.index_location)

        if not os.path.exists(self.index_location):
            os.mkdir(self.index_location)

        chgset_idx = create_in(self.index_location, CHGSETS_SCHEMA,
                               indexname=CHGSET_IDX_NAME)
        chgset_idx_writer = chgset_idx.writer()

        file_idx = create_in(self.index_location, SCHEMA, indexname=IDX_NAME)
        file_idx_writer = file_idx.writer()
        log.debug('BUILDING INDEX FOR EXTENSIONS %s '
                  'AND REPOS %s' % (INDEX_EXTENSIONS, self.repo_paths.keys()))

        for repo_name, repo in self.repo_paths.items():
            # skip indexing if there aren't any revisions
            if len(repo) < 1:
                continue

            self.index_files(file_idx_writer, repo_name, repo)
            self.index_changesets(chgset_idx_writer, repo_name, repo)

        log.debug('>> COMMITTING CHANGES <<')
        file_idx_writer.commit(merge=True)
        chgset_idx_writer.commit(merge=True)
        log.debug('>>> FINISHED BUILDING INDEX <<<')

    def update_indexes(self):
        self.update_file_index()
        self.update_changeset_index()

    def run(self, full_index=False):
        """Run daemon"""
        if full_index or self.initial:
            self.build_indexes()
        else:
            self.update_indexes()
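
A minimal invocation sketch for the daemon above; the two filesystem paths are hypothetical, and the module-level names it relies on (IDX_NAME, the schemas, log) are assumed to be importable alongside the class.

    # hypothetical paths; a SQLAlchemy session could also be passed via sa=
    daemon = WhooshIndexingDaemon(index_location='/srv/kallithea/index',
                                  repo_location='/srv/kallithea/repos')
    daemon.run(full_index=True)  # True forces a full rebuild; False allows an incremental update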