def fake_tmpl_context(_url):
    """Return a minimal stand-in for the template context `c`, populated
    with only the gravatar-related visual settings."""
    _c = AttributeDict()
    _c.visual = AttributeDict()
    _c.visual.use_gravatar = True
    _c.visual.gravatar_url = _url
    return _c
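# A minimal usage sketch; the URL template below is a made-up example value,
# not one taken from this code.
def _example_fake_tmpl_context():
    _c = fake_tmpl_context('https://example.com/avatar/{email}?s={size}')
    assert _c.visual.use_gravatar is True
    assert _c.visual.gravatar_url.startswith('https://example.com/')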
def _before(self, *args, **kwargs):
    """
    _before is called before controller methods and after __call__
    """
    c.kallithea_version = __version__
    rc_config = Setting.get_app_settings()

    # Visual options
    c.visual = AttributeDict({})

    ## DB stored
    c.visual.show_public_icon = str2bool(rc_config.get('show_public_icon'))
    c.visual.show_private_icon = str2bool(rc_config.get('show_private_icon'))
    c.visual.stylify_metatags = str2bool(rc_config.get('stylify_metatags'))
    c.visual.page_size = safe_int(rc_config.get('dashboard_items', 100))
    c.visual.admin_grid_items = safe_int(rc_config.get('admin_grid_items', 100))
    c.visual.repository_fields = str2bool(rc_config.get('repository_fields'))
    c.visual.show_version = str2bool(rc_config.get('show_version'))
    c.visual.use_gravatar = str2bool(rc_config.get('use_gravatar'))
    c.visual.gravatar_url = rc_config.get('gravatar_url')

    c.ga_code = rc_config.get('ga_code')
    # TODO: replace undocumented backwards compatibility hack with db upgrade and rename ga_code
    if c.ga_code and '<' not in c.ga_code:
        c.ga_code = '''<script type="text/javascript">
            var _gaq = _gaq || [];
            _gaq.push(['_setAccount', '%s']);
            _gaq.push(['_trackPageview']);

            (function() {
                var ga = document.createElement('script');
                ga.type = 'text/javascript';
                ga.async = true;
                ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
                var s = document.getElementsByTagName('script')[0];
                s.parentNode.insertBefore(ga, s);
            })();
        </script>''' % c.ga_code
    c.site_name = rc_config.get('title')
    c.clone_uri_tmpl = rc_config.get('clone_uri_tmpl')

    ## INI stored
    c.visual.allow_repo_location_change = str2bool(config.get('allow_repo_location_change', True))
    c.visual.allow_custom_hooks_settings = str2bool(config.get('allow_custom_hooks_settings', True))

    c.instance_id = config.get('instance_id')
    c.issues_url = config.get('bugtracker', url('issues_url'))
    # END CONFIG VARS

    c.repo_name = get_repo_slug(request)  # can be empty
    c.backends = BACKENDS.keys()
    c.unread_notifications = NotificationModel() \
        .get_unread_cnt_for_user(request.authuser.user_id)

    self.cut_off_limit = safe_int(config.get('cut_off_limit'))

    c.my_pr_count = PullRequest.query(reviewer_id=request.authuser.user_id, include_closed=False).count()

    self.scm_model = ScmModel()
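# Illustrative note on the coercion above: settings loaded from the database
# come back as strings, hence the str2bool()/safe_int() wrapping. Note that
# the 100 fallback is the dict lookup default, so it only applies when the key
# is missing; if the stored value exists but is unparsable, safe_int() is
# assumed to return its own default (None) instead. A minimal sketch:
def _example_setting_coercion():
    # key missing: the dict default kicks in before safe_int sees the value
    assert safe_int({}.get('dashboard_items', 100)) == 100
    # key present but unparsable: safe_int falls back to its default, not 100
    assert safe_int({'dashboard_items': 'oops'}.get('dashboard_items', 100)) is None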
def update(self, gist, description, owner, gist_mapping, gist_type, lifetime):
    """Update an existing gist; a lifetime of 0 preserves the current expiry."""
    gist = self._get_gist(gist)
    gist_repo = gist.scm_instance

    lifetime = safe_int(lifetime, -1)
    if lifetime == 0:  # preserve old value
        gist_expires = gist.gist_expires
    else:
        gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1

    # calculate operation type based on given data
    gist_mapping_op = {}
    for k, v in gist_mapping.items():
        # add, mod, del
        if not v['org_filename'] and v['filename']:
            op = 'add'
        elif v['org_filename'] and not v['filename']:
            op = 'del'
        else:
            op = 'mod'

        v['op'] = op
        gist_mapping_op[k] = v

    gist.gist_description = description
    gist.gist_expires = gist_expires
    gist.owner = owner
    gist.gist_type = gist_type
    self.sa.add(gist)
    self.sa.flush()

    message = 'updated file'
    message += 's: ' if len(gist_mapping) > 1 else ': '
    message += ', '.join([x for x in gist_mapping])

    # fake Kallithea Repository object
    fake_repo = AttributeDict(dict(
        repo_name=gist_repo.path,
        scm_instance_no_cache=lambda: gist_repo,
    ))

    self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
                         owner.user_id, gist.gist_type, gist.gist_expires)

    ScmModel().update_nodes(
        user=owner.user_id,
        repo=fake_repo,
        message=message,
        nodes=gist_mapping_op,
        trigger_push_hook=False
    )

    return gist
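# A sketch of the mapping shape update() expects (hypothetical file names):
# each value carries the original and the new filename, and the loop above
# derives the operation from which of the two is set.
def _example_gist_mapping_ops():
    gist_mapping = {
        'new.txt': {'org_filename': '', 'filename': 'new.txt', 'content': 'a'},            # -> 'add'
        'old.txt': {'org_filename': 'old.txt', 'filename': '', 'content': ''},             # -> 'del'
        'kept.txt': {'org_filename': 'kept.txt', 'filename': 'kept.txt', 'content': 'b'},  # -> 'mod'
    }
    return gist_mapping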
def _atom_feed(self, repos, public=True):
    journal = self._get_journal_data(repos)
    if public:
        _link = h.canonical_url('public_journal_atom')
        _desc = '%s %s %s' % (c.site_name, _('public journal'), 'atom feed')
    else:
        _link = h.canonical_url('journal_atom')
        _desc = '%s %s %s' % (c.site_name, _('journal'), 'atom feed')
    feed = Atom1Feed(title=_desc,
                     link=_link,
                     description=_desc,
                     language=self.language,
                     ttl=self.ttl)

    for entry in journal[:self.feed_nr]:
        user = entry.user
        if user is None:
            # fix deleted users
            user = AttributeDict({'short_contact': entry.username,
                                  'email': '',
                                  'full_contact': ''})
        action, action_extra, ico = h.action_parser(entry, feed=True)
        title = "%s - %s %s" % (user.short_contact, action(),
                                entry.repository.repo_name)
        desc = action_extra()
        _url = None
        if entry.repository is not None:
            _url = h.canonical_url('changelog_home',
                                   repo_name=entry.repository.repo_name)

        feed.add_item(title=title,
                      pubdate=entry.action_date,
                      link=_url or h.canonical_url(''),
                      author_email=user.email,
                      author_name=user.full_contact,
                      description=desc)

    response.content_type = feed.mime_type
    return feed.writeString('utf-8')
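# A standalone sketch of the Atom1Feed flow used above, assuming the
# webhelpers/django-style feedgenerator API; all values are made up for
# illustration.
def _example_atom_feed():
    from datetime import datetime
    feed = Atom1Feed(title='example journal',
                     link='http://example.com/journal',
                     description='example journal atom feed',
                     language='en-us')
    feed.add_item(title='user - pushed example/repo',
                  link='http://example.com/example/repo/changelog',
                  description='3 changesets',
                  pubdate=datetime(2014, 1, 1))
    return feed.writeString('utf-8')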
def _feed(self, repos, feeder, link, desc):
    response.content_type = feeder.content_type
    journal = self._get_journal_data(repos)

    header = dict(
        title=desc,
        link=link,
        description=desc,
    )

    entries = []
    for entry in journal[:feed_nr]:
        user = entry.user
        if user is None:
            # fix deleted users
            user = AttributeDict({'short_contact': entry.username,
                                  'email': '',
                                  'full_contact': ''})
        action, action_extra, ico = h.action_parser(entry, feed=True)
        title = "%s - %s %s" % (user.short_contact, action(),
                                entry.repository.repo_name)
        _url = None
        if entry.repository is not None:
            _url = h.canonical_url('changelog_home',
                                   repo_name=entry.repository.repo_name)
        entries.append(dict(
            title=title,
            pubdate=entry.action_date,
            link=_url or h.canonical_url(''),
            author_email=user.email,
            author_name=user.full_name_or_username,
            description=action_extra(),
        ))

    return feeder.render(header, entries)
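# Sketch of the header/entries contract this refactored version hands to the
# feeder object. _ExampleFeeder is a hypothetical stub, written only to show
# the expected shape; it is not the real renderer.
class _ExampleFeeder(object):
    content_type = 'application/atom+xml'

    def render(self, header, entries):
        # header: dict with title/link/description
        # entries: list of dicts with title/pubdate/link/author_*/description
        lines = ['<feed title=%r>' % header['title']]
        lines += ['  <entry title=%r/>' % e['title'] for e in entries]
        lines.append('</feed>')
        return '\n'.join(lines)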
def get_cs_links():
    revs_limit = 3  # display this amount always
    revs_top_limit = 50  # show up to this amount of changesets hidden
    revs_ids = action_params.split(',')
    deleted = user_log.repository is None
    if deleted:
        return ','.join(revs_ids)

    repo_name = user_log.repository.repo_name

    def lnk(rev, repo_name):
        lazy_cs = False
        title_ = None
        url_ = '#'
        if isinstance(rev, BaseChangeset) or isinstance(rev, AttributeDict):
            if rev.op and rev.ref_name:
                if rev.op == 'delete_branch':
                    lbl = _('Deleted branch: %s') % rev.ref_name
                elif rev.op == 'tag':
                    lbl = _('Created tag: %s') % rev.ref_name
                else:
                    lbl = 'Unknown operation %s' % rev.op
            else:
                lazy_cs = True
                lbl = rev.short_id[:8]
                url_ = url('changeset_home', repo_name=repo_name,
                           revision=rev.raw_id)
        else:
            # changeset cannot be found - it might have been stripped or removed
            lbl = rev[:12]
            title_ = _('Changeset %s not found') % lbl
        if parse_cs:
            return link_to(lbl, url_, title=title_, **{'data-toggle': 'tooltip'})
        return link_to(lbl, url_, class_='lazy-cs' if lazy_cs else '',
                       **{'data-raw_id': rev.raw_id,
                          'data-repo_name': repo_name})

    def _get_op(rev_txt):
        _op = None
        _name = rev_txt
        if len(rev_txt.split('=>')) == 2:
            _op, _name = rev_txt.split('=>')
        return _op, _name

    revs = []
    if len([v for v in revs_ids if v != '']) > 0:
        repo = None
        for rev in revs_ids[:revs_top_limit]:
            _op, _name = _get_op(rev)

            # we want parsed changesets, or new log store format is bad
            if parse_cs:
                try:
                    if repo is None:
                        repo = user_log.repository.scm_instance
                    _rev = repo.get_changeset(rev)
                    revs.append(_rev)
                except ChangesetDoesNotExistError:
                    log.error('cannot find revision %s in this repo', rev)
                    revs.append(rev)
            else:
                _rev = AttributeDict({
                    'short_id': rev[:12],
                    'raw_id': rev,
                    'message': '',
                    'op': _op,
                    'ref_name': _name
                })
                revs.append(_rev)
    cs_links = [" " + ', '.join(
        [lnk(rev, repo_name) for rev in revs[:revs_limit]]
    )]
    _op1, _name1 = _get_op(revs_ids[0])
    _op2, _name2 = _get_op(revs_ids[-1])

    _rev = '%s...%s' % (_name1, _name2)

    compare_view = (
        ' <div class="compare_view" data-toggle="tooltip" title="%s">'
        '<a href="%s">%s</a> </div>' % (
            _('Show all combined changesets %s->%s') % (
                revs_ids[0][:12], revs_ids[-1][:12]
            ),
            url('changeset_home', repo_name=repo_name,
                revision=_rev
            ),
            _('Compare view')
        )
    )

    # if we have exactly one more than normally displayed
    # just display it, takes less space than displaying
    # "and 1 more revisions"
    if len(revs_ids) == revs_limit + 1:
        cs_links.append(", " + lnk(revs[revs_limit], repo_name))

    # hidden-by-default ones
    if len(revs_ids) > revs_limit + 1:
        uniq_id = revs_ids[0]
        html_tmpl = (
            '<span> %s <a class="show_more" id="_%s" '
            'href="#more">%s</a> %s</span>'
        )
        if not feed:
            cs_links.append(html_tmpl % (
                _('and'),
                uniq_id, _('%s more') % (len(revs_ids) - revs_limit),
                _('revisions')
            ))

        if not feed:
            html_tmpl = '<span id="%s" style="display:none">, %s </span>'
        else:
            html_tmpl = '<span id="%s"> %s </span>'

        morelinks = ', '.join(
            [lnk(rev, repo_name) for rev in revs[revs_limit:]]
        )

        if len(revs_ids) > revs_top_limit:
            morelinks += ', ...'

        cs_links.append(html_tmpl % (uniq_id, morelinks))
    if len(revs) > 1:
        cs_links.append(compare_view)
    return ''.join(cs_links)
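# Illustration of the action_params encoding that _get_op() above unpacks:
# plain revision hashes, optionally carrying an 'op=>name' marker for branch
# and tag operations (example values are made up).
def _example_get_op_encoding():
    assert 'deadbeef0001'.split('=>') == ['deadbeef0001']  # plain revision
    assert 'delete_branch=>stable'.split('=>') == ['delete_branch', 'stable']
    assert 'tag=>v1.0'.split('=>') == ['tag', 'v1.0']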
def create(self, description, owner, gist_mapping,
           gist_type=Gist.GIST_PUBLIC, lifetime=-1):
    """
    :param description: description of the gist
    :param owner: user who created this gist
    :param gist_mapping: mapping {filename:{'content':content},...}
    :param gist_type: type of gist private/public
    :param lifetime: in minutes, -1 == forever
    """
    owner = self._get_user(owner)
    gist_id = safe_unicode(unique_id(20))
    lifetime = safe_int(lifetime, -1)
    gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
    log.debug('set GIST expiration date to: %s',
              time_to_datetime(gist_expires)
              if gist_expires != -1 else 'forever')
    # create the Database version
    gist = Gist()
    gist.gist_description = description
    gist.gist_access_id = gist_id
    gist.gist_owner = owner.user_id
    gist.gist_expires = gist_expires
    gist.gist_type = safe_unicode(gist_type)
    self.sa.add(gist)
    self.sa.flush()
    if gist_type == Gist.GIST_PUBLIC:
        # use DB ID for easy to use GIST ID
        gist_id = safe_unicode(gist.gist_id)
        gist.gist_access_id = gist_id
        self.sa.add(gist)

    gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
    log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
    repo = RepoModel()._create_filesystem_repo(
        repo_name=gist_id, repo_type='hg', repo_group=GIST_STORE_LOC)

    processed_mapping = {}
    for filename in gist_mapping:
        if filename != os.path.basename(filename):
            raise Exception('Filename cannot be inside a directory')

        content = gist_mapping[filename]['content']
        # TODO: expand support for setting explicit lexers
        # if lexer is None:
        #     try:
        #         guess_lexer = pygments.lexers.guess_lexer_for_filename
        #         lexer = guess_lexer(filename, content)
        #     except pygments.util.ClassNotFound:
        #         lexer = 'text'
        processed_mapping[filename] = {'content': content}

    # now create single multifile commit
    message = 'added file'
    message += 's: ' if len(processed_mapping) > 1 else ': '
    message += ', '.join([x for x in processed_mapping])

    # fake Kallithea Repository object
    fake_repo = AttributeDict(dict(
        repo_name=gist_repo_path,
        scm_instance_no_cache=lambda: repo,
    ))

    ScmModel().create_nodes(
        user=owner.user_id,
        repo=fake_repo,
        message=message,
        nodes=processed_mapping,
        trigger_push_hook=False
    )

    self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
                         owner.user_id, gist.gist_type, gist.gist_expires)
    return gist
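# A hedged usage sketch (hypothetical values; assumes a configured database
# session and an existing user): create() wants a {filename: {'content': ...}}
# mapping and a lifetime in minutes, with -1 meaning the gist never expires.
def _example_create_gist(owner_user):
    return GistModel().create(
        description='example snippet',
        owner=owner_user,
        gist_mapping={'snippet.py': {'content': 'print("hello")'}},
        gist_type=Gist.GIST_PUBLIC,
        lifetime=60,  # expire in one hour; -1 == forever
    )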
def _before(self, *args, **kwargs):
    """
    _before is called before controller methods and after __call__
    """
    if request.needs_csrf_check:
        # CSRF protection: Whenever a request has ambient authority (whether
        # through a session cookie or its origin IP address), it must include
        # the correct token, unless the HTTP method is GET or HEAD (and thus
        # guaranteed to be side effect free). In practice, the only situation
        # where we allow side effects without ambient authority is when the
        # authority comes from an API key; and that is handled above.
        from kallithea.lib import helpers as h
        token = request.POST.get(h.session_csrf_secret_name)
        if not token or token != h.session_csrf_secret_token():
            log.error('CSRF check failed')
            raise webob.exc.HTTPForbidden()

    c.kallithea_version = __version__
    rc_config = Setting.get_app_settings()

    # Visual options
    c.visual = AttributeDict({})

    ## DB stored
    c.visual.show_public_icon = str2bool(rc_config.get('show_public_icon'))
    c.visual.show_private_icon = str2bool(rc_config.get('show_private_icon'))
    c.visual.stylify_metalabels = str2bool(rc_config.get('stylify_metalabels'))
    c.visual.page_size = safe_int(rc_config.get('dashboard_items', 100))
    c.visual.admin_grid_items = safe_int(rc_config.get('admin_grid_items', 100))
    c.visual.repository_fields = str2bool(rc_config.get('repository_fields'))
    c.visual.show_version = str2bool(rc_config.get('show_version'))
    c.visual.use_gravatar = str2bool(rc_config.get('use_gravatar'))
    c.visual.gravatar_url = rc_config.get('gravatar_url')

    c.ga_code = rc_config.get('ga_code')
    # TODO: replace undocumented backwards compatibility hack with db upgrade and rename ga_code
    if c.ga_code and '<' not in c.ga_code:
        c.ga_code = '''<script type="text/javascript">
            var _gaq = _gaq || [];
            _gaq.push(['_setAccount', '%s']);
            _gaq.push(['_trackPageview']);

            (function() {
                var ga = document.createElement('script');
                ga.type = 'text/javascript';
                ga.async = true;
                ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
                var s = document.getElementsByTagName('script')[0];
                s.parentNode.insertBefore(ga, s);
            })();
        </script>''' % c.ga_code
    c.site_name = rc_config.get('title')
    c.clone_uri_tmpl = rc_config.get('clone_uri_tmpl') or Repository.DEFAULT_CLONE_URI
    c.clone_ssh_tmpl = rc_config.get('clone_ssh_tmpl') or Repository.DEFAULT_CLONE_SSH

    ## INI stored
    c.visual.allow_repo_location_change = str2bool(config.get('allow_repo_location_change', True))
    c.visual.allow_custom_hooks_settings = str2bool(config.get('allow_custom_hooks_settings', True))
    c.ssh_enabled = str2bool(config.get('ssh_enabled', False))

    c.instance_id = config.get('instance_id')
    c.issues_url = config.get('bugtracker', url('issues_url'))
    # END CONFIG VARS

    c.repo_name = get_repo_slug(request)  # can be empty
    c.backends = list(BACKENDS)

    self.cut_off_limit = safe_int(config.get('cut_off_limit'))

    c.my_pr_count = PullRequest.query(reviewer_id=request.authuser.user_id, include_closed=False).count()

    self.scm_model = ScmModel()
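# Sketch of what the CSRF gate above expects from clients: every authenticated
# non-GET/HEAD request must echo the per-session secret back as a POST field.
# A hypothetical helper for test code, built only from names already used in
# the check (h.session_csrf_secret_name / h.session_csrf_secret_token):
def _example_csrf_post_fields(other_fields):
    from kallithea.lib import helpers as h
    fields = dict(other_fields)
    fields[h.session_csrf_secret_name] = h.session_csrf_secret_token()
    return fields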