def index(self, title='', text='', state='draft', labels='', limit=10, page=0, **kw):
    """REST endpoint for a blog: POST creates a post, GET lists readable posts.

    :param title: title for the new post (POST only)
    :param text: body text for the new post (POST only)
    :param state: workflow state of the new post, default 'draft'
    :param labels: comma-separated label string
    :param limit: page size for the listing (GET only)
    :param page: zero-based page number (GET only)
    :return: HTTPCreated on POST; dict with posts/count/limit/page on GET
    """
    if request.method == 'POST':
        require_access(c.app, 'write')
        # Fix: ''.split(',') yields [''] which would attach a bogus empty
        # label; treat an empty labels string as "no labels".
        post = BM.BlogPost.new(
            title=title,
            state=state,
            text=text,
            labels=labels.split(',') if labels else [],
            **kw)
        return exc.HTTPCreated(
            headers=dict(Location=h.absurl('/rest' + post.url())))
    else:
        result = RootController().index(limit=limit, page=page)
        posts = result['posts']
        post_titles = []
        for post in posts:
            # only expose posts the current user can read
            if has_access(post, 'read')():
                post_titles.append({
                    'title': post.title,
                    'url': h.absurl('/rest' + post.url())})
        return dict(posts=post_titles, count=result['count'],
                    limit=result['limit'], page=result['page'])
def feed(cls, q, feed_type, title, link, description, since=None, until=None, page=None, limit=None):
    """Produces webhelper.feedgenerator Feed.

    :param q: a mongo query dict, or a callable (since, until, page, limit) -> query dict
    :param feed_type: 'atom' or 'rss'
    :param since: lower bound on pubdate (inclusive), optional
    :param until: upper bound on pubdate (inclusive), optional
    :param page: zero-based page number, sanitized via h.paging_sanitizer
    :param limit: page size, defaults to 10
    :raises ValueError: for an unsupported feed_type
    """
    d = dict(title=title, link=h.absurl(link), description=description,
             language=u'en', feed_url=request.url)
    if feed_type == 'atom':
        feed = FG.Atom1Feed(**d)
    elif feed_type == 'rss':
        feed = RssFeed(**d)
    else:
        # Fix: an unknown feed_type previously fell through and raised a
        # confusing NameError when `feed` was first used below.
        raise ValueError('Unsupported feed type: %r' % feed_type)
    limit, page = h.paging_sanitizer(limit or 10, page)
    query = defaultdict(dict)
    if callable(q):
        q = q(since, until, page, limit)
    query.update(q)
    if since is not None:
        query['pubdate']['$gte'] = since
    if until is not None:
        query['pubdate']['$lte'] = until
    cur = cls.query.find(query)
    cur = cur.sort('pubdate', pymongo.DESCENDING)
    cur = cur.limit(limit)
    cur = cur.skip(limit * page)
    for r in cur:
        feed.add_item(title=r.title,
                      link=h.absurl(r.link.encode('utf-8')),
                      pubdate=r.pubdate,
                      description=r.description,
                      unique_id=h.absurl(r.unique_id),
                      author_name=r.author_name,
                      author_link=h.absurl(r.author_link))
    return feed
def feed(self, **kw):
    """Render the activity timeline as an Atom or RSS feed, chosen by the
    request path extension ('.atom' vs anything else).

    Fix: the item title previously always appended ' on <target>' even when
    the activity had no target name, producing a dangling ' on '; the suffix
    is now omitted entirely in that case (matching the other feed
    implementation in this file).
    """
    data = self._get_activities_data(**kw)
    # clear any pre-set header so content_type takes effect cleanly
    response.headers['Content-Type'] = ''
    response.content_type = 'application/xml'
    d = {
        'title': 'Activity for %s' % data['followee'].activity_name,
        'link': h.absurl(self.app.url),
        'description': 'Recent activity for %s' % (
            data['followee'].activity_name),
        'language': u'en',
    }
    if request.environ['PATH_INFO'].endswith('.atom'):
        feed = FG.Atom1Feed(**d)
    else:
        feed = FG.Rss201rev2Feed(**d)
    for t in data['timeline']:
        url = h.absurl(t.obj.activity_url.encode('utf-8'))
        feed.add_item(title=u'%s %s %s%s' % (
            t.actor.activity_name,
            t.verb,
            t.obj.activity_name,
            ' on %s' % t.target.activity_name if t.target.activity_name else '',
        ),
            link=url,
            pubdate=t.published,
            description=t.obj.activity_extras.get('summary'),
            unique_id=url,
            author_name=t.actor.activity_name,
            author_link=h.absurl(t.actor.activity_url))
    return feed.writeString('utf-8')
def get_mail_footer(self, notification, toaddr):
    """Return the mail footer for this notification, using the monitored
    variant when the mail is going to this tracker's monitoring address."""
    is_monitoring_addr = bool(toaddr) and toaddr == self.monitoring_email
    if not is_monitoring_addr:
        return super(Ticket, self).get_mail_footer(notification, toaddr)
    settings_url = h.absurl('{0}admin/{1}/options'.format(
        self.project.url(), self.app.config.options.mount_point))
    return MailFooter.monitored(toaddr, h.absurl(self.app.url), settings_url)
def feed(self, **kw):
    """Render the activity timeline as an Atom or RSS feed; the format is
    picked from the request path extension ('.atom' vs anything else)."""
    data = self._get_activities_data(**kw)
    # clear any pre-set header so content_type takes effect cleanly
    response.headers['Content-Type'] = ''
    response.content_type = 'application/xml'
    followee = data['followee']
    feed_info = {
        'title': 'Activity for %s' % followee.activity_name,
        'link': h.absurl(self.app.url),
        'description': 'Recent activity for %s' % (followee.activity_name),
        'language': u'en',
    }
    if request.environ['PATH_INFO'].endswith('.atom'):
        feed = FG.Atom1Feed(**feed_info)
    else:
        feed = FG.Rss201rev2Feed(**feed_info)
    for entry in data['timeline']:
        entry_url = h.absurl(entry.obj.activity_url.encode('utf-8'))
        # only mention the target when the activity actually has one
        target_part = ' on %s' % entry.target.activity_name if entry.target.activity_name else ''
        feed.add_item(
            title=u'%s %s %s%s' % (
                entry.actor.activity_name,
                entry.verb,
                entry.obj.activity_name,
                target_part,
            ),
            link=entry_url,
            pubdate=entry.published,
            description=entry.obj.activity_extras.get('summary'),
            unique_id=entry_url,
            author_name=entry.actor.activity_name,
            author_link=h.absurl(entry.actor.activity_url))
    return feed.writeString('utf-8')
def __json__(self):
    """Return a JSON-encodable dict describing this project for the REST API.

    Includes identity, descriptive fields, developer list, readable tools,
    trove categories, and absolute URLs for icon and screenshots.
    """
    return dict(
        shortname=self.shortname,
        name=self.name,
        _id=str(self._id),
        url=h.absurl(self.url()),
        private=self.private,
        short_description=self.short_description,
        summary=self.summary,
        external_homepage=self.external_homepage,
        socialnetworks=[dict(n) for n in self.socialnetworks],
        # 'active' unless a removal state is recorded
        status=self.removal or 'active',
        moved_to_url=self.moved_to_url,
        preferred_support_tool=self.support_page,
        preferred_support_url=self.support_page_url,
        developers=[u.__json__()
                    for u in self.users_with_role('Developer')],
        # only tools the current user can read
        tools=[dict(name=t.tool_name, mount_point=t.options.mount_point,
                    label=t.options.mount_label)
               for t in self.app_configs if h.has_access(t, 'read')],
        labels=list(self.labels),
        categories={
            n: [t.__json__() for t in ts]
            for n, ts in self.all_troves().items()},
        icon_url=h.absurl(self.url() + 'icon') if self.icon else None,
        screenshots=[
            dict(
                url=h.absurl(self.url() + 'screenshot/' +
                             urllib.quote(ss.filename)),
                thumbnail_url=h.absurl(self.url() + 'screenshot/' +
                                       urllib.quote(ss.filename) + '/thumb'),
                caption=ss.caption,
            )
            for ss in self.get_screenshots()
        ]
    )
def feed(cls, q, feed_type, title, link, description, since=None, until=None, offset=None, limit=None):
    """Produces webhelper.feedgenerator Feed.

    :param q: mongo query dict merged over a pubdate range filter
    :param feed_type: 'atom' or 'rss'
    :param since: lower bound on pubdate (inclusive), optional
    :param until: upper bound on pubdate (inclusive), optional
    :param offset: number of items to skip, optional
    :param limit: max items, defaults to 10
    :raises ValueError: for an unsupported feed_type
    """
    d = dict(title=title, link=h.absurl(link), description=description,
             language=u'en')
    if feed_type == 'atom':
        feed = FG.Atom1Feed(**d)
    elif feed_type == 'rss':
        feed = FG.Rss201rev2Feed(**d)
    else:
        # Fix: an unknown feed_type previously fell through and raised a
        # confusing NameError when `feed` was first used below.
        raise ValueError('Unsupported feed type: %r' % feed_type)
    query = defaultdict(dict)
    query.update(q)
    if since is not None:
        query['pubdate']['$gte'] = since
    if until is not None:
        query['pubdate']['$lte'] = until
    cur = cls.query.find(query)
    cur = cur.sort('pubdate', pymongo.DESCENDING)
    if limit is None:
        limit = 10
    # Fix: these results were previously assigned to `query`, shadowing the
    # query dict; it only worked because cursor methods mutate the cursor in
    # place and return it.  Rebind `cur` explicitly instead.
    cur = cur.limit(limit)
    if offset is not None:
        cur = cur.offset(offset)
    for r in cur:
        feed.add_item(title=r.title,
                      link=h.absurl(r.link.encode('utf-8')),
                      pubdate=r.pubdate,
                      description=r.description,
                      unique_id=h.absurl(r.unique_id),
                      author_name=r.author_name,
                      author_link=h.absurl(r.author_link))
    return feed
def get_mail_footer(self, notification, toaddr):
    """Use the monitored footer when mailing this forum's monitoring
    address; otherwise defer to the parent class."""
    if toaddr and toaddr == self.monitoring_email:
        admin_url = h.absurl("{0}admin/{1}/forums".format(
            self.project.url(), self.app.config.options.mount_point))
        return MailFooter.monitored(toaddr, h.absurl(self.url()), admin_url)
    return super(Forum, self).get_mail_footer(notification, toaddr)
def get_mail_footer(self, notification, toaddr):
    """Return the monitored footer for the monitoring address, or the
    standard footer for everyone else."""
    monitoring = bool(toaddr) and toaddr == self.monitoring_email
    if monitoring:
        settings_url = h.absurl("{0}admin/{1}/options".format(
            self.project.url(), self.app.config.options.mount_point))
        return MailFooter.monitored(toaddr, h.absurl(self.app.url), settings_url)
    return MailFooter.standard(
        notification,
        self.app.config.options.get("AllowEmailPosting", True),
        discussion_disabled=self.discussion_disabled)
def index(self, title="", text="", state="draft", labels="", limit=10, page=0, **kw):
    """REST endpoint for a blog: POST creates a post, GET lists readable posts.

    :param title: title for the new post (POST only)
    :param text: body text for the new post (POST only)
    :param state: workflow state of the new post, default 'draft'
    :param labels: comma-separated label string
    :param limit: page size for the listing (GET only)
    :param page: zero-based page number (GET only)
    :return: HTTPCreated on POST; dict with posts/count/limit/page on GET
    """
    if request.method == "POST":
        require_access(c.app, "write")
        # Fix: "".split(",") yields [""] which would attach a bogus empty
        # label; treat an empty labels string as "no labels".
        post = BM.BlogPost.new(
            title=title,
            state=state,
            text=text,
            labels=labels.split(",") if labels else [],
            **kw)
        return exc.HTTPCreated(
            headers=dict(Location=h.absurl("/rest" + post.url())))
    else:
        result = RootController().index(limit=limit, page=page)
        posts = result["posts"]
        post_titles = []
        for post in posts:
            # only expose posts the current user can read
            if has_access(post, "read")():
                post_titles.append(
                    {"title": post.title,
                     "url": h.absurl("/rest" + post.url())})
        return dict(posts=post_titles, count=result["count"],
                    limit=result["limit"], page=result["page"])
def index(self, limit=None, page=0, **kw):
    """REST listing of this forum's topics, pinned ('flags') first then by
    most recent activity, paginated via the global paging helper.

    :return: dict with forum metadata plus 'topics', 'count', 'page', 'limit'
    """
    limit, page, start = g.handle_paging(limit, int(page))
    topics = model.Forum.thread_class().query.find(
        dict(discussion_id=self.forum._id))
    topics = topics.sort([('flags', pymongo.DESCENDING),
                          ('last_post_date', pymongo.DESCENDING)])
    topics = topics.skip(start).limit(limit)
    # NOTE: cursor count — total matching topics, not just this page
    count = topics.count()
    json = {}
    json['forum'] = self.forum.__json__(
        limit=1
    )  # small limit since we're going to "del" the threads anyway
    # topics replace threads here
    del json['forum']['threads']
    json['forum']['topics'] = [
        dict(_id=t._id,
             subject=t.subject,
             num_replies=t.num_replies,
             num_views=t.num_views,
             url=h.absurl('/rest' + t.url()),
             last_post=t.last_post)
        for t in topics if t.status == 'ok'
    ]
    json['count'] = count
    json['page'] = page
    json['limit'] = limit
    return json
def __json__(self):
    """Return a JSON-encodable dict for this ticket.

    Walks the MRO from most-base to most-derived, merging each parent's
    __json__ output so derived fields override inherited ones, then layers
    the ticket-specific fields on top.
    """
    parents_json = {}
    for parent in reversed(type(self).mro()):
        if parent != type(self) and hasattr(parent, '__json__'):
            parents_json.update(parent.__json__(self))
    return dict(
        parents_json,
        created_date=self.created_date,
        ticket_num=self.ticket_num,
        summary=self.summary,
        description=self.description,
        reported_by=self.reported_by_username,
        assigned_to=self.assigned_to_username,
        reported_by_id=self.reported_by_id and str(self.reported_by_id) or None,
        assigned_to_id=self.assigned_to_id and str(self.assigned_to_id) or None,
        status=self.status,
        private=self.private,
        attachments=[
            dict(bytes=attach.length, url=h.absurl(attach.url()))
            for attach in self.attachments
        ],
        # copy into a plain dict so callers don't alias the mapped field
        custom_fields=dict(self.custom_fields))
def __json__(self):
    """JSON-encodable dict for this wiki page, extending the base
    artifact fields with title, text, labels, and attachment info."""
    base = super(Page, self).__json__()
    attachment_info = []
    for att in self.attachments:
        attachment_info.append(dict(bytes=att.length, url=h.absurl(att.url())))
    return dict(base,
                title=self.title,
                text=self.text,
                labels=list(self.labels),
                attachments=attachment_info)
def merge(self, mr): g = self._impl._git.git # can't merge in bare repo, so need to clone tmp_path = tempfile.mkdtemp() try: tmp_repo = git.Repo.clone_from(self.full_fs_path, to_path=tmp_path, shared=True, bare=False) tmp_repo = GitImplementation(Object(full_fs_path=tmp_path))._git tmp_repo.git.fetch('origin', mr.target_branch) tmp_repo.git.checkout(mr.target_branch) tmp_repo.git.fetch(mr.downstream_repo.full_fs_path, mr.source_branch) author = h.really_unicode(c.user.display_name or c.user.username) tmp_repo.git.config('user.name', author.encode('utf8')) tmp_repo.git.config( 'user.email', 'allura@localhost') # a public email alias could be nice here msg = u'Merge {} branch {} into {}\n\n{}'.format( mr.downstream_repo.url(), mr.source_branch, mr.target_branch, h.absurl(mr.url())) tmp_repo.git.merge(mr.downstream.commit_id, '-m', msg) tmp_repo.git.push('origin', mr.target_branch) finally: shutil.rmtree(tmp_path, ignore_errors=True)
def notify_moderators(self, post):
    ''' Notify moderators that a post needs approval [#2963]

    Builds a direct notification pointing at the thread's moderation page
    and sends it to every project user who both has 'moderate' access here
    and is subscribed to this tool's mailbox.
    '''
    # posts may hang off an artifact; fall back to the thread itself
    artifact = self.artifact or self
    subject = '[%s:%s] Moderation action required' % (
        c.project.shortname, c.app.config.options.mount_point)
    author = post.author()
    url = self.discussion_class().query.get(_id=self.discussion_id).url()
    text = ('The following submission requires approval at %s before '
            'it can be approved for posting:\n\n%s'
            % (h.absurl(url + 'moderate'), post.text))
    n = Notification(
        ref_id=artifact.index_id(),
        topic='message',
        link=artifact.url(),
        _id=artifact.url() + post._id,
        # anonymous authors get no from_address
        from_address=str(author._id) if author != User.anonymous() else None,
        reply_to_address=g.noreply,
        subject=subject,
        text=text,
        in_reply_to=post.parent_id,
        author_id=author._id,
        pubdate=datetime.utcnow())
    users = self.app_config.project.users()
    for u in users:
        if (has_access(self, 'moderate', u)
            and Mailbox.subscribed(user_id=u._id,
                                   app_config_id=post.app_config_id)):
            n.send_direct(str(u._id))
def merge(self, mr): g = self._impl._git.git # can't merge in bare repo, so need to clone tmp_path = tempfile.mkdtemp() try: tmp_repo = git.Repo.clone_from( self.full_fs_path, to_path=tmp_path, shared=True, bare=False) tmp_repo = GitImplementation(Object(full_fs_path=tmp_path))._git tmp_repo.git.fetch('origin', mr.target_branch) tmp_repo.git.checkout(mr.target_branch) tmp_repo.git.fetch(mr.downstream_repo.full_fs_path, mr.source_branch) author = h.really_unicode(c.user.display_name or c.user.username) tmp_repo.git.config('user.name', author.encode('utf8')) tmp_repo.git.config('user.email', 'allura@localhost') # a public email alias could be nice here msg = u'Merge {} branch {} into {}\n\n{}'.format( mr.downstream_repo.url(), mr.source_branch, mr.target_branch, h.absurl(mr.url())) tmp_repo.git.merge(mr.downstream.commit_id, '-m', msg) tmp_repo.git.push('origin', mr.target_branch) finally: shutil.rmtree(tmp_path, ignore_errors=True)
def send_user_mention_notification(self, mentioned_by, artifact):
    """Send user mention notification to {self} user.

    Renders the usermentions email template with a link to the artifact
    (paginated URL for Posts) and queues a simple mail task to this user's
    preferred email address.

    :param mentioned_by: the user who wrote the mention
    :param artifact: the artifact containing the mention
    """
    tmpl = g.jinja2_env.get_template('allura:templates/mail/usermentions_email.md')
    subject = '[%s:%s] Your name was mentioned' % (
        c.project.shortname, c.app.config.options.mount_point)
    item_url = artifact.url()
    # posts link to the specific page of the thread they appear on
    if artifact.type_s == 'Post':
        item_url = artifact.url_paginated()
    tmpl_context = {
        'site_domain': config['domain'],
        'base_url': config['base_url'],
        'user': c.user,
        'artifact_link': h.absurl(item_url),
        'artifact_linktext': artifact.link_text(),
        'mentioned_by': mentioned_by
    }
    allura.tasks.mail_tasks.sendsimplemail.post(
        toaddr=self.get_pref('email_address'),
        fromaddr=g.noreply,
        reply_to=g.noreply,
        message_id=h.gen_message_id(),
        subject=subject,
        text=tmpl.render(tmpl_context))
def __json__(self, posts_limit=None):
    """Return a JSON-encodable dict for this ticket.

    Walks the MRO from most-base to most-derived, merging each parent's
    __json__ output; only VersionedArtifact accepts the posts_limit kwarg,
    so it is forwarded selectively.

    :param posts_limit: cap on discussion posts serialized by the
        VersionedArtifact parent
    """
    parents_json = {}
    for parent in reversed(type(self).mro()):
        if parent != type(self) and hasattr(parent, "__json__"):
            kwargs = {}
            if parent == VersionedArtifact:
                kwargs["posts_limit"] = posts_limit
            parents_json.update(parent.__json__(self, **kwargs))
    return dict(
        parents_json,
        created_date=self.created_date,
        ticket_num=self.ticket_num,
        summary=self.summary,
        description=self.description,
        reported_by=self.reported_by_username,
        assigned_to=self.assigned_to_id and self.assigned_to_username or None,
        reported_by_id=self.reported_by_id and str(self.reported_by_id) or None,
        assigned_to_id=self.assigned_to_id and str(self.assigned_to_id) or None,
        status=self.status,
        private=self.private,
        discussion_disabled=self.discussion_disabled,
        attachments=[dict(bytes=attach.length, url=h.absurl(attach.url()))
                     for attach in self.attachments],
        # copy into a plain dict so callers don't alias the mapped field
        custom_fields=dict(self.custom_fields),
    )
def __json__(self, posts_limit=None):
    """JSON-encodable dict for this wiki page.

    :param posts_limit: cap on discussion posts serialized by the parent
    """
    base = super(Page, self).__json__(posts_limit=posts_limit)
    attachment_info = [
        dict(bytes=att.length, url=h.absurl(att.url()))
        for att in self.attachments
    ]
    return dict(base,
                title=self.title,
                text=self.text,
                labels=list(self.labels),
                attachments=attachment_info)
def test_notification_two_attaches():
    """Posting with two file attachments should produce a notification whose
    text lists both attachments with size and content type."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test comment notification')
    # build two fake uploaded files as cgi FieldStorage objects
    fs1 = FieldStorage()
    fs1.name = 'file_info'
    fs1.filename = 'fake.txt'
    fs1.type = 'text/plain'
    fs1.file = StringIO('this is the content of the fake file\n')
    fs2 = FieldStorage()
    fs2.name = 'file_info'
    fs2.filename = 'fake2.txt'
    fs2.type = 'text/plain'
    fs2.file = StringIO('this is the content of the fake file\n')
    p = t.post(text=u'test message', forum=None, subject='',
               file_info=[fs1, fs2])
    ThreadLocalORMSession.flush_all()
    n = M.Notification.query.get(
        subject=u'[test:wiki] Test comment notification')
    base_url = h.absurl('{}attachment/'.format(p.url()))
    # both attachments must be listed in the notification body
    assert_in(
        '\nAttachments:\n\n'
        '- [fake.txt]({0}fake.txt) (37 Bytes; text/plain)\n'
        '- [fake2.txt]({0}fake2.txt) (37 Bytes; text/plain)'.format(base_url),
        n.text)
def attachments_for_export(self):
    """List attachment metadata dicts (size, absolute URL, and the
    relative path the file will occupy inside an export archive)."""
    exported = []
    for att in self.attachments:
        export_path = os.path.join(
            self.app_config.options.mount_point,
            str(self._id),
            os.path.basename(att.filename))
        exported.append(dict(bytes=att.length,
                             url=h.absurl(att.url()),
                             path=export_path))
    return exported
def send_direct(self, user_id):
    """Send this notification directly to a single user by id.

    Skips silently (with debug logging) when the user is missing/disabled/
    pending, or lacks read access to the referenced artifact; otherwise
    queues a sendmail task.

    :param user_id: ObjectId (or its string form) of the recipient
    """
    user = User.query.get(_id=ObjectId(user_id), disabled=False, pending=False)
    artifact = self.ref.artifact
    log.debug('Sending direct notification %s to user %s',
              self._id, user_id)
    # Don't send if user disabled
    if not user:
        log.debug("Skipping notification - enabled user %s not found" %
                  user_id)
        return
    # Don't send if user doesn't have read perms to the artifact
    if user and artifact and \
            not security.has_access(artifact, 'read', user)():
        log.debug("Skipping notification - User %s doesn't have read "
                  "access to artifact %s" % (user_id, str(self.ref_id)))
        log.debug("User roles [%s]; artifact ACL [%s]; PSC ACL [%s]",
                  ', '.join([str(r) for r in security.Credentials.get().user_roles(
                      user_id=user_id, project_id=artifact.project._id).reaching_ids]),
                  ', '.join([str(a) for a in artifact.acl]),
                  ', '.join([str(a) for a in artifact.parent_security_context().acl]))
        return
    allura.tasks.mail_tasks.sendmail.post(
        destinations=[str(user_id)],
        fromaddr=self.from_address,
        reply_to=self.reply_to_address,
        subject=self.subject,
        message_id=self._id,
        in_reply_to=self.in_reply_to,
        references=self.references,
        sender=self._sender(),
        metalink=h.absurl(self.link),
        text=(self.text or '') + self.footer())
def make_password_reset_url(self):
    """Generate a one-time password-reset URL and persist its nonce with an
    expiry timestamp in this user's AuthPasswordReset tool data."""
    token = h.nonce(42)
    expiry_seconds = int(config.get('auth.recovery_hash_expiry_period', 600))
    self.set_tool_data(
        'AuthPasswordReset',
        hash=token,
        hash_expiry=datetime.utcnow() + timedelta(seconds=expiry_seconds))
    return h.absurl('/auth/forgotten_password/{}'.format(token))
def __json__(self, posts_limit=None):
    """JSON-encodable dict for this blog post.

    :param posts_limit: cap on discussion posts serialized by the parent
    """
    base = super(BlogPost, self).__json__(posts_limit=posts_limit)
    rest_url = h.absurl('/rest' + self.url())
    return dict(base,
                author=self.author().username,
                title=self.title,
                url=rest_url,
                text=self.text,
                labels=list(self.labels),
                state=self.state)
def __json__(self, posts_limit=None, is_export=False):
    """JSON-encodable dict for this blog post.

    :param posts_limit: cap on discussion posts serialized by the parent
    :param is_export: forwarded to the parent serializer for exports
    """
    base = super(BlogPost, self).__json__(posts_limit=posts_limit,
                                          is_export=is_export)
    rest_url = h.absurl('/rest' + self.url())
    return dict(base,
                author=self.author().username,
                title=self.title,
                url=rest_url,
                text=self.text,
                labels=list(self.labels),
                state=self.state)
def __json__(self):
    """JSON-encodable dict describing this webhook for the REST API."""
    rest_url = h.absurl('/rest' + self.url())
    return dict(
        _id=six.text_type(self._id),
        url=rest_url,
        type=six.text_type(self.type),
        hook_url=six.text_type(self.hook_url),
        mod_date=self.mod_date,
    )
def __json__(self):
    """JSON-encodable dict describing this webhook for the REST API."""
    rest_url = h.absurl(u'/rest' + self.url())
    return dict(
        _id=unicode(self._id),
        url=rest_url,
        type=unicode(self.type),
        hook_url=unicode(self.hook_url),
        mod_date=self.mod_date,
    )
def __json__(self):
    """JSON-encodable dict for this blog post, extending the base
    artifact fields."""
    base = super(BlogPost, self).__json__()
    rest_url = h.absurl('/rest' + self.url())
    return dict(base,
                author=self.author().username,
                title=self.title,
                url=rest_url,
                text=self.text,
                labels=list(self.labels),
                state=self.state)
def attachments_for_export(self):
    """List attachment metadata dicts (size, absolute URL, and the
    relative path the file will occupy inside an export archive)."""
    result = []
    for att in self.attachments:
        rel_path = os.path.join(self.app_config.options.mount_point,
                                str(self._id),
                                os.path.basename(att.filename))
        result.append(dict(bytes=att.length,
                           url=h.absurl(att.url()),
                           path=rel_path))
    return result
def __json__(self):
    """JSON-encodable dict of this artifact's generic fields."""
    thread = self.discussion_thread
    return {
        '_id': str(self._id),
        'mod_date': self.mod_date,
        'labels': self.labels,
        'related_artifacts': [related.url() for related in self.related_artifacts()],
        'discussion_thread': thread,
        'discussion_thread_url': h.absurl('/rest%s' % thread.url()),
    }
def __json__(self):
    """JSON-encodable dict for this wiki page, extending the base
    artifact fields with title, text, labels, and attachment info."""
    return dict(super(Page, self).__json__(),
                title=self.title,
                text=self.text,
                # Fix: copy to a plain list so the JSON output doesn't alias
                # the mutable mapped field (consistent with the other Page
                # __json__ implementations in this file)
                labels=list(self.labels),
                attachments=[
                    dict(bytes=attach.length, url=h.absurl(attach.url()))
                    for attach in self.attachments
                ])
def doap(self, parent):
    """App's representation for DOAP API.

    :param parent: Element to contain the results
    :type parent: xml.etree.ElementTree.Element or
        xml.etree.ElementTree.SubElement
    """
    outer = ET.SubElement(parent, 'sf:feature')
    inner = ET.SubElement(outer, 'sf:Feature')
    name_el = ET.SubElement(inner, 'name')
    name_el.text = self.config.options.mount_label
    ET.SubElement(inner, 'foaf:page', {'rdf:resource': h.absurl(self.url)})
def index(self, title='', text='', state='draft', labels='', limit=10, page=0, **kw):
    """REST endpoint for a blog: POST creates a post, GET lists readable posts.

    :param title: title for the new post (POST only)
    :param text: body text for the new post (POST only)
    :param state: workflow state of the new post, default 'draft'
    :param labels: comma-separated label string
    :param limit: page size for the listing (GET only)
    :param page: zero-based page number (GET only)
    :return: HTTPCreated on POST; dict with posts/count/limit/page on GET
    """
    if request.method == 'POST':
        require_access(c.app, 'write')
        # Fix: ''.split(',') yields [''] which would attach a bogus empty
        # label; treat an empty labels string as "no labels".
        post = BM.BlogPost.new(
            title=title, state=state, text=text,
            labels=labels.split(',') if labels else [], **kw)
        return exc.HTTPCreated(
            headers=dict(Location=h.absurl('/rest' + post.url())))
    else:
        result = RootController().index(limit=limit, page=page)
        posts = result['posts']
        post_titles = []
        for post in posts:
            # only expose posts the current user can read
            if has_access(post, 'read')():
                post_titles.append(
                    {'title': post.title,
                     'url': h.absurl('/rest' + post.url())})
        return dict(posts=post_titles, count=result['count'],
                    limit=result['limit'], page=result['page'])
def __json__(self):
    """JSON-encodable dict for this thread, with lightweight summaries of
    its posts (slug, subject, attachment info)."""
    def _post_json(post):
        files = [dict(bytes=att.length, url=h.absurl(att.url()))
                 for att in post.attachments]
        return dict(slug=post.slug, subject=post.subject, attachments=files)
    return dict(_id=self._id,
                discussion_id=str(self.discussion_id),
                subject=self.subject,
                posts=[_post_json(p) for p in self.posts])
def __json__(self):
    """Return a JSON-encodable dict describing this project for the REST API.

    Includes identity, descriptive fields, developer list, readable tools,
    trove categories, and absolute URLs for icon and screenshots.
    """
    return dict(shortname=self.shortname,
                name=self.name,
                _id=str(self._id),
                url=h.absurl(self.url()),
                private=self.private,
                short_description=self.short_description,
                summary=self.summary,
                external_homepage=self.external_homepage,
                socialnetworks=[dict(n) for n in self.socialnetworks],
                # 'active' unless a removal state is recorded
                status=self.removal or 'active',
                moved_to_url=self.moved_to_url,
                preferred_support_tool=self.support_page,
                preferred_support_url=self.support_page_url,
                developers=[
                    u.__json__() for u in self.users_with_role('Developer')
                ],
                # only tools the current user can read
                tools=[
                    dict(name=t.tool_name,
                         mount_point=t.options.mount_point,
                         label=t.options.mount_label)
                    for t in self.app_configs if h.has_access(t, 'read')
                ],
                labels=list(self.labels),
                categories={
                    n: [t.__json__() for t in ts]
                    for n, ts in self.all_troves().items()
                },
                icon_url=h.absurl(self.url() + 'icon') if self.icon else None,
                screenshots=[
                    dict(
                        url=h.absurl(self.url() + 'screenshot/' +
                                     urllib.quote(ss.filename)),
                        thumbnail_url=h.absurl(
                            self.url() + 'screenshot/' +
                            urllib.quote(ss.filename) + '/thumb'),
                        caption=ss.caption,
                    ) for ss in self.get_screenshots()
                ])
def get_data(self, text, artifact=None, user=None, content_type='comment', request=None, **kw):
    """Assemble the parameter dict for an Akismet spam-check request.

    # Docs: https://akismet.com/development/api/

    :param text: the content being checked
    :param artifact: optional artifact the content belongs to; supplies
        permalink and date fields
    :param user: author; defaults to the current user (c.user)
    :param content_type: Akismet comment_type value
    :param request: live request, used for ip/user-agent/referrer; when
        absent (ham/spam reports) those fields are None, with a best-effort
        ip recovered from the artifact's first snapshot
    :return: kw dict with every value utf8-encoded, ready for urlencoding
    """
    kw['comment_content'] = text
    kw['comment_type'] = content_type
    if artifact:
        try:
            # if its a comment, get wiki, ticket, etc URL
            url = artifact.main_url()
        except Exception:
            url = artifact.url()
        kw['permalink'] = h.absurl(url)
        if hasattr(artifact, 'timestamp'):
            # Message & Post objects
            date_created = artifact.timestamp
        else:
            # fallback for other artifacts, not exactly "created" date though
            date_created = artifact.mod_date
        kw['comment_date_gmt'] = date_created.isoformat()
        kw['comment_post_modified_gmt'] = artifact.primary(
        ).mod_date.isoformat()
    user = user or c.user
    if user:
        kw['comment_author'] = user.display_name or user.username
        kw['comment_author_email'] = user.email_addresses[
            0] if user.email_addresses else ''
    if request is not None:
        kw['user_ip'] = utils.ip_address(request)
        kw['user_agent'] = request.headers.get('USER_AGENT')
        kw['referrer'] = request.headers.get('REFERER')
    else:
        # these are required fields, but for ham/spam reports we don't have the original values to send :/
        kw['user_ip'] = None
        kw['user_agent'] = None
        if artifact and hasattr(
                artifact, 'get_version'
        ):  # VersionedArtifacts (includes comment posts)
            try:
                kw['user_ip'] = artifact.get_version(1).author.logged_ip
            except IndexError:
                log.debug("couldn't get Snapshot for this artifact %s",
                          artifact)
    # kw will be urlencoded, need to utf8-encode
    for k, v in kw.items():
        kw[k] = h.really_unicode(v).encode('utf8')
    return kw
def __json__(self):
    """Return a JSON-encodable :class:`dict` representation of this
    Artifact, with the discussion thread serialized inline."""
    thread = self.discussion_thread
    return {
        '_id': str(self._id),
        'mod_date': self.mod_date,
        'labels': list(self.labels),
        'related_artifacts': [related.url() for related in self.related_artifacts()],
        'discussion_thread': thread.__json__(),
        'discussion_thread_url': h.absurl('/rest%s' % thread.url()),
    }
def __json__(self):
    """JSON-encodable dict for this thread, with lightweight summaries of
    its posts (slug, subject, attachment info)."""
    post_dicts = []
    for post in self.posts:
        files = []
        for att in post.attachments:
            files.append(dict(bytes=att.length, url=h.absurl(att.url())))
        post_dicts.append(dict(slug=post.slug,
                               subject=post.subject,
                               attachments=files))
    return dict(_id=self._id,
                discussion_id=str(self.discussion_id),
                subject=self.subject,
                posts=post_dicts)
def __json__(self):
    """Return a JSON-encodable dict describing this project for the REST API.

    Includes identity, descriptive fields, registration date, developer
    list, readable tool instances, trove categories, and absolute URLs for
    icon and screenshots; user-projects additionally expose a profile URL.
    """
    result = dict(
        shortname=self.shortname,
        name=self.name,
        _id=str(self._id),
        url=h.absurl(self.url()),
        private=self.private,
        short_description=self.short_description,
        creation_date=plugin.ProjectRegistrationProvider.get().registration_date(self).strftime('%Y-%m-%d'),
        summary=self.summary,
        external_homepage=self.external_homepage,
        video_url=self.video_url,
        socialnetworks=[dict(n) for n in self.socialnetworks],
        # 'active' unless a removal state is recorded
        status=self.removal or 'active',
        moved_to_url=self.moved_to_url,
        preferred_support_tool=self.support_page,
        preferred_support_url=self.support_page_url,
        developers=[u.__json__() for u in self.users_with_role('Developer')],
        # only tools the current user can read
        tools=[self.app_instance(t) for t in self.app_configs
               if h.has_access(t, 'read')],
        labels=list(self.labels),
        categories={n: [t.__json__() for t in ts]
                    for n, ts in self.all_troves().items()},
        icon_url=h.absurl(self.url() + 'icon') if self.icon else None,
        screenshots=[
            dict(
                url=h.absurl(self.url() + 'screenshot/' +
                             urllib.quote(ss.filename.encode('utf8'))),
                thumbnail_url=h.absurl(
                    self.url(
                    ) + 'screenshot/' + urllib.quote(ss.filename.encode('utf8')) + '/thumb'),
                caption=ss.caption,
            )
            for ss in self.get_screenshots()
        ]
    )
    if self.is_user_project:
        result['profile_api_url'] = h.absurl('/rest' + self.url() + 'profile/')
    return result
def add_project(self, **kw):
    """REST endpoint: create a new project in this neighborhood from a JSON
    request body.

    Validates the payload against the neighborhood's project schema and the
    registration provider; returns 400 with an error message on validation
    failure, or 201 with the new project's URLs on success.
    """
    # TODO: currently limited to 'admin' permissions instead of 'register' since not enough validation is in place.
    # There is sanity checks and validation that the user may create a project, but not on project fields
    # for example: tool_data, admins, awards, etc can be set arbitrarily right now
    # and normal fields like description, summary, external_homepage, troves etc don't have validation on length,
    # quantity, value etc. which match the HTML web form validations
    # if/when this is handled better, the following line can be updated. Also update api.raml docs
    # security.require_access(self._neighborhood, 'register')
    security.require_access(self._neighborhood, 'admin')
    project_reg = plugin.ProjectRegistrationProvider.get()
    jsondata = json.loads(request.body)
    projectSchema = make_newproject_schema(self._neighborhood)
    try:
        pdata = deserialize_project(jsondata, projectSchema, self._neighborhood)
        shortname = pdata.shortname
        project_reg.validate_project(self._neighborhood, shortname,
                                     pdata.name, c.user,
                                     user_project=False,
                                     private_project=pdata.private)
    except (colander.Invalid, ForgeError) as e:
        response.status_int = 400
        return {
            'error': six.text_type(e) or repr(e),
        }
    project = create_project_with_attrs(pdata, self._neighborhood)
    response.status_int = 201
    response.location = str(h.absurl('/rest' + project.url()))
    return {
        "status": "success",
        "html_url": h.absurl(project.url()),
        "url": h.absurl('/rest' + project.url()),
    }
def index(self, title='', text='', state='draft', labels='', limit=10, page=0, **kw):
    """REST endpoint for a blog: POST creates a post (rate-limited), GET
    lists readable posts.

    :param title: title for the new post (POST only)
    :param text: body text for the new post (POST only)
    :param state: workflow state of the new post, default 'draft'
    :param labels: comma-separated label string
    :param limit: page size for the listing (GET only)
    :param page: zero-based page number (GET only)
    :raises HTTPTooManyRequests: when the create/edit rate limit is exceeded
    :return: HTTPCreated on POST; dict with posts/count/limit/page on GET
    """
    if request.method == 'POST':
        require_access(c.app, 'write')
        if BM.BlogPost.is_limit_exceeded(c.app.config, user=c.user):
            log.warn('Create/edit rate limit exceeded. %s',
                     c.app.config.url())
            raise forge_exc.HTTPTooManyRequests()
        # Fix: ''.split(',') yields [''] which would attach a bogus empty
        # label; treat an empty labels string as "no labels".
        post = BM.BlogPost.new(
            title=title,
            state=state,
            text=text,
            labels=labels.split(',') if labels else [],
            **kw)
        return exc.HTTPCreated(
            headers=dict(Location=h.absurl('/rest' + post.url()).encode('utf-8')))
    else:
        result = RootController().index(limit=limit, page=page)
        posts = result['posts']
        post_titles = []
        for post in posts:
            # only expose posts the current user can read
            if has_access(post, 'read')():
                post_titles.append(
                    {'title': post.title,
                     'url': h.absurl('/rest' + post.url())})
        return dict(posts=post_titles, count=result['count'],
                    limit=result['limit'], page=result['page'])
def __json__(self, posts_limit=None, is_export=False, user=None):
    """Return a JSON-encodable :class:`dict` representation of this
    Artifact.

    :param posts_limit: cap on discussion posts serialized into the thread
    :param is_export: forwarded to the thread serializer for exports
    :param user: user whose visibility governs related artifacts
        (defaults to the current user)
    """
    thread = self.discussion_thread
    viewer = user or c.user
    return {
        '_id': str(self._id),
        'mod_date': self.mod_date,
        'labels': list(self.labels),
        'related_artifacts': [related.url()
                              for related in self.related_artifacts(user=viewer)],
        'discussion_thread': thread.__json__(limit=posts_limit,
                                             is_export=is_export),
        'discussion_thread_url': h.absurl('/rest%s' % thread.url()),
    }
def index(self, title='', text='', state='draft', labels='', limit=10, page=0, **kw):
    """REST endpoint for a blog: POST creates a post (rate-limited), GET
    lists readable posts.

    :param title: title for the new post (POST only)
    :param text: body text for the new post (POST only)
    :param state: workflow state of the new post, default 'draft'
    :param labels: comma-separated label string
    :param limit: page size for the listing (GET only)
    :param page: zero-based page number (GET only)
    :raises HTTPTooManyRequests: when the create/edit rate limit is exceeded
    :return: HTTPCreated on POST; dict with posts/count/limit/page on GET
    """
    if request.method == 'POST':
        require_access(c.app, 'write')
        if BM.BlogPost.is_limit_exceeded(c.app.config, user=c.user):
            log.warn('Create/edit rate limit exceeded. %s',
                     c.app.config.url())
            raise forge_exc.HTTPTooManyRequests()
        # Fix: ''.split(',') yields [''] which would attach a bogus empty
        # label; treat an empty labels string as "no labels".
        post = BM.BlogPost.new(
            title=title,
            state=state,
            text=text,
            labels=labels.split(',') if labels else [],
            **kw)
        return exc.HTTPCreated(
            headers=dict(Location=str(h.absurl('/rest' + post.url()).encode('utf-8'))))
    else:
        result = RootController().index(limit=limit, page=page)
        posts = result['posts']
        post_titles = []
        for post in posts:
            # only expose posts the current user can read
            if has_access(post, 'read')():
                post_titles.append(
                    {'title': post.title,
                     'url': h.absurl('/rest' + post.url())})
        return dict(posts=post_titles, count=result['count'],
                    limit=result['limit'], page=result['page'])
def __json__(self, limit=None, page=None):
    """JSON-encodable dict for this thread, serializing only 'ok'-status
    posts in chronological order.

    :param limit: page size passed to the post query
    :param page: page number passed to the post query
    """
    def _serialize(post):
        files = [dict(bytes=att.length, url=h.absurl(att.url()))
                 for att in post.attachments]
        return dict(slug=post.slug,
                    text=post.text,
                    subject=post.subject,
                    author=post.author().username,
                    timestamp=post.timestamp,
                    attachments=files)
    visible_posts = self.query_posts(status='ok', style='chronological',
                                     limit=limit, page=page)
    return dict(_id=self._id,
                discussion_id=str(self.discussion_id),
                subject=self.subject,
                posts=[_serialize(p) for p in visible_posts])
def __json__(self):
    """Return a JSON-encodable dict for this ticket, extending the base
    artifact fields."""
    return dict(super(Ticket, self).__json__(),
                created_date=self.created_date,
                ticket_num=self.ticket_num,
                summary=self.summary,
                description=self.description,
                reported_by=self.reported_by_username,
                assigned_to=self.assigned_to_username,
                reported_by_id=self.reported_by_id and str(self.reported_by_id) or None,
                assigned_to_id=self.assigned_to_id and str(self.assigned_to_id) or None,
                status=self.status,
                private=self.private,
                attachments=[dict(bytes=attach.length,
                                  url=h.absurl(attach.url()))
                             for attach in self.attachments],
                # Fix: copy into a plain dict so the JSON output doesn't
                # alias the mutable mapped field (consistent with the other
                # Ticket __json__ implementations in this file)
                custom_fields=dict(self.custom_fields))
def url(email=None, gravatar_id=None, **kw):
    """Build a complete gravatar URL with our favorite defaults.

    Provide at least one of ``email`` (digested via this module's ``id()``)
    or a pre-digested ``gravatar_id``; ``email`` is ignored when both are
    supplied.  Remaining keyword arguments become URL query parameters;
    Gravatar recognizes 's'/'size' (square image size in pixels),
    'd'/'default' (fallback image URL or keyword, e.g. 'wavatar',
    'identicon'), and 'r'/'rating' ('g', 'pg', 'r', or 'x').  We default
    the rating to 'pg' and the default image to the configured
    ``default_avatar_image``, when set.
    """
    assert gravatar_id or email
    if gravatar_id is None:
        gravatar_id = id(email)
    rating_supplied = 'r' in kw or 'rating' in kw
    if not rating_supplied:
        kw['r'] = 'pg'
    if 'd' not in kw and config.get('default_avatar_image'):
        kw['d'] = h.absurl(config['default_avatar_image'])
    query = six.moves.urllib.parse.urlencode(kw)
    return 'https://secure.gravatar.com/avatar/%s?%s' % (gravatar_id, query)
def url(email=None, gravatar_id=None, **kw):
    """Build a complete gravatar URL with our favorite defaults.

    Provide at least one of ``email`` (digested via this module's ``id()``)
    or a pre-digested ``gravatar_id``; ``email`` is ignored when both are
    supplied.  Remaining keyword arguments become URL query parameters;
    Gravatar recognizes 's'/'size' (square image size in pixels),
    'd'/'default' (fallback image URL or keyword, e.g. 'wavatar',
    'identicon'), and 'r'/'rating' ('g', 'pg', 'r', or 'x').  We default
    the rating to 'pg' and the default image to the configured
    ``default_avatar_image``, when set.
    """
    assert gravatar_id or email
    if gravatar_id is None:
        gravatar_id = id(email)
    rating_supplied = 'r' in kw or 'rating' in kw
    if not rating_supplied:
        kw['r'] = 'pg'
    if 'd' not in kw and config.get('default_avatar_image'):
        kw['d'] = h.absurl(config['default_avatar_image'])
    query = urllib.urlencode(kw)
    return 'https://secure.gravatar.com/avatar/%s?%s' % (gravatar_id, query)
def __json__(self, limit=None, page=None, is_export=False):
    """JSON-encodable dict for this thread, serializing only 'ok'-status
    posts in chronological order.

    :param limit: page size passed to the post query (echoed in output)
    :param page: page number passed to the post query (echoed in output)
    :param is_export: use export-style attachment serialization
    """
    serialized_posts = []
    for post in self.query_posts(status='ok', style='chronological',
                                 limit=limit, page=page):
        author = post.author()
        serialized_posts.append(dict(
            slug=post.slug,
            text=post.text,
            subject=post.subject,
            author=author.username,
            author_icon_url=h.absurl(author.icon_url()),
            timestamp=post.timestamp,
            last_edited=post.last_edit_date,
            attachments=self.attachment_for_export(post)
            if is_export else self.attachments_for_json(post)))
    return dict(_id=self._id,
                discussion_id=str(self.discussion_id),
                subject=self.subject,
                limit=limit,
                page=page,
                posts=serialized_posts)
def index(self, limit=None, page=0, **kw):
    """REST listing of this tool's top-level (non-deleted) forums, sorted by
    shortname and paginated via the global paging helper.

    :return: dict with 'forums', 'limit', 'page', 'count'
    """
    limit, page, start = g.handle_paging(limit, int(page))
    forums = model.Forum.query.find(dict(
        app_config_id=c.app.config._id,
        parent_id=None, deleted=False)
    ).sort([('shortname', pymongo.ASCENDING)]).skip(start).limit(limit)
    # NOTE(review): count is the total matching forums, but the listing
    # below also filters by read access, so 'count' can exceed the number
    # of entries actually returned — confirm this is intended
    count = forums.count()
    json = dict(forums=[dict(_id=f._id,
                             name=f.name,
                             shortname=f.shortname,
                             description=f.description,
                             num_topics=f.num_topics,
                             last_post=f.last_post,
                             url=h.absurl('/rest' + f.url()))
                        for f in forums if has_access(f, 'read')])
    json['limit'] = limit
    json['page'] = page
    json['count'] = count
    return json
def send_verification_link(self):
    """Email a one-time address-verification link for this email record.

    Generates and stores a fresh nonce, then queues a simple mail task
    containing the verification URL.
    """
    self.set_nonce_hash()
    log.info('Sending verification link to %s', self.email)
    text = ''' To verify the email address %s belongs to the user %s, please visit the following URL: %s ''' % (
        self.email,
        self.claimed_by_user(include_pending=True).username,
        h.absurl('/auth/verify_addr?a={}'.format(h.urlquote(self.nonce))),
    )
    log.info('Verification email:\n%s', text)
    allura.tasks.mail_tasks.sendsimplemail.post(
        fromaddr=g.noreply,
        reply_to=g.noreply,
        toaddr=self.email,
        subject=u'%s - Email address verification' % config['site_name'],
        message_id=h.gen_message_id(),
        text=text)
def __json__(self):
    """Return a JSON-encodable dict for this ticket.

    Walks the MRO from most-base to most-derived, merging each parent's
    __json__ output so derived fields override inherited ones, then layers
    the ticket-specific fields on top.
    """
    parents_json = {}
    for parent in reversed(type(self).mro()):
        if parent != type(self) and hasattr(parent, '__json__'):
            parents_json.update(parent.__json__(self))
    return dict(parents_json,
                created_date=self.created_date,
                ticket_num=self.ticket_num,
                summary=self.summary,
                description=self.description,
                reported_by=self.reported_by_username,
                assigned_to=self.assigned_to_id and self.assigned_to_username or None,
                reported_by_id=self.reported_by_id and str(self.reported_by_id) or None,
                assigned_to_id=self.assigned_to_id and str(self.assigned_to_id) or None,
                status=self.status,
                private=self.private,
                attachments=[dict(bytes=attach.length,
                                  url=h.absurl(attach.url()))
                             for attach in self.attachments],
                # copy into a plain dict so callers don't alias the mapped field
                custom_fields=dict(self.custom_fields))