def __init__(self, permissions, **kwargs):
    """Build the ACE (access-control entry) schema.

    ``permissions`` is either ``None`` — in which case any string is
    accepted as a permission name — or an iterable of allowed permission
    names, in which case the ``permission`` field is restricted to those
    names plus the wildcard ``'*'``.
    """
    permission_schema = (
        S.String() if permissions is None
        else S.OneOf('*', *permissions))
    super(ACE, self).__init__(
        fields=dict(
            access=S.OneOf(self.ALLOW, self.DENY),
            role_id=S.ObjectId(),
            permission=permission_schema),
        **kwargs)
class Example(SproxTestClass):
    """Fixture document exercising one field of every basic schema type."""

    class __mongometa__:
        name = 'example_rs'

    _id = FieldProperty(S.ObjectId)
    created = FieldProperty(datetime, if_missing=datetime.now)
    blob = FieldProperty(S.Binary)  # XXX BLOB?
    binary = FieldProperty(S.Binary)
    boolean = FieldProperty(bool)
    char = FieldProperty(str)
    cLOB = FieldProperty(str)  # XXX CLOB?
    date_ = FieldProperty(datetime)  # XXX date?
    datetime_ = FieldProperty(datetime)
    decimal = FieldProperty(Decimal)
    date = FieldProperty(datetime)  # XXX date?
    float__ = FieldProperty(float)
    float_ = FieldProperty(float)
    int_ = FieldProperty(int)
    integer = FieldProperty(int, if_missing=10)
    interval = FieldProperty(timedelta)
    numeric = FieldProperty(Decimal)
    pickletype = FieldProperty(str)  # XXX pickle
    smallint = FieldProperty(int)
    smalliunteger = FieldProperty(int)
    string = FieldProperty(str)
    text = FieldProperty(str)
    time_ = FieldProperty(datetime)  # XXX time
    timestamp = FieldProperty(datetime)
    unicode_ = FieldProperty(str)
    varchar = FieldProperty(str)
    password = FieldProperty(str)
    oneof = FieldProperty(S.OneOf("one", "two", "three"))
    # Bound LAST on purpose: this assignment shadows the imported
    # ``datetime`` class inside the class body.  In the original ordering it
    # came before ``time_`` and ``timestamp``, so those two fields received
    # the FieldProperty instance instead of the ``datetime`` type.
    datetime = FieldProperty(datetime)
class ForgeDiscussionApp(Application):
    """Tool application providing multi-forum discussions for a project."""

    __version__ = version.__version__
    # Permission names this tool's ACL checks understand.
    permissions = ['configure', 'read', 'unmoderated_post',
                   'post', 'moderate', 'admin']
    permissions_desc = {
        'configure': 'Create new forums.',
        'read': 'View posts.',
        'admin': 'Set permissions. Edit forum properties.',
    }
    config_options = Application.config_options + [
        ConfigOption('PostingPolicy',
                     schema.OneOf('ApproveOnceModerated', 'ModerateAll'),
                     'ApproveOnceModerated'),
        ConfigOption('AllowEmailPosting', bool, True)
    ]
    PostClass = DM.ForumPost
    AttachmentClass = DM.ForumAttachment
    searchable = True
    exportable = True
    tool_label = 'Discussion'
    tool_description = """ Discussion forums are a place to talk about any topics related to your project. You may set up multiple forums within the Discussion tool. """
    default_mount_label = 'Discussion'
    default_mount_point = 'discussion'
    ordinal = 7
    icons = {
        24: 'images/forums_24.png',
        32: 'images/forums_32.png',
        48: 'images/forums_48.png'
    }

    def __init__(self, project, config):
        Application.__init__(self, project, config)
        # Controllers for the public UI, the REST API, and the admin UI.
        self.root = RootController()
        self.api_root = RootRestController()
        self.admin = ForumAdminController(self)

    def has_access(self, user, topic):
        """Return whether ``user`` may post to the forum named by ``topic``."""
        # Topics arrive dotted (e.g. 'forum.sub'); forum shortnames use '/'.
        f = DM.Forum.query.get(shortname=topic.replace('.', '/'),
                               app_config_id=self.config._id)
        return has_access(f, 'post', user=user)()

    def handle_message(self, topic, message):
        """Dispatch an incoming message to the forum named by ``topic``."""
        log.info('Message from %s (%s)',
                 topic, self.config.options.mount_point)
        log.info('Headers are: %s', message['headers'])
        shortname = urllib.unquote_plus(topic.replace('.', '/'))
        forum = DM.Forum.query.get(
            shortname=shortname, app_config_id=self.config._id)
        if forum is None:
            log.error("Error looking up forum: %r", shortname)
            return
        self.handle_artifact_message(forum, message)

    def main_menu(self):
        '''Apps should provide their entries to be added to the main nav
        :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>`
        '''
        return [SitemapEntry(
            self.config.options.mount_label,
            '.')]

    @property
    @h.exceptionless([], log)
    def sitemap(self):
        # NOTE(review): h.exceptionless presumably turns any error here into
        # an empty list — confirm against the helper's definition.
        menu_id = self.config.options.mount_label
        with h.push_config(c, app=self):
            return [
                SitemapEntry(menu_id, '.')[self.sidebar_menu()]]

    @property
    def forums(self):
        # Every forum mounted under this app instance.
        return DM.Forum.query.find(dict(app_config_id=self.config._id)).all()

    @property
    def top_forums(self):
        # Forums with no parent (top level of the hierarchy).
        return self.subforums_of(None)

    def subforums_of(self, parent_id):
        return DM.Forum.query.find(dict(
            app_config_id=self.config._id,
            parent_id=parent_id,
        )).all()

    def admin_menu(self):
        admin_url = c.project.url() + 'admin/' + \
            self.config.options.mount_point + '/'
        links = []
        if has_access(self, 'configure')():
            links.append(SitemapEntry('Forums', admin_url + 'forums'))
        links += super(ForgeDiscussionApp, self).admin_menu()
        return links

    def sidebar_menu(self):
        """Build the sidebar entries for the current request context."""
        try:
            l = []
            moderate_link = None
            forum_links = []
            forums = DM.Forum.query.find(dict(
                app_config_id=c.app.config._id,
                parent_id=None,
                deleted=False))
            for f in forums:
                if has_access(f, 'read')():
                    # Offer a "Moderate" link (with pending-post count) only
                    # when viewing a forum the user can moderate.
                    if f.url() in request.url and h.has_access(f, 'moderate')():
                        num_moderate = DM.ForumPost.query.find({
                            'discussion_id': f._id,
                            'status': {'$ne': 'ok'},
                            'deleted': False,
                        }).count()
                        moderate_link = SitemapEntry(
                            'Moderate', "%smoderate/" % f.url(),
                            ui_icon=g.icons['moderate'],
                            small=num_moderate)
                    forum_links.append(
                        SitemapEntry(f.name, f.url(),
                                     small=f.num_topics))
            url = c.app.url + 'create_topic/'
            # Pre-select the current forum in the create-topic form if any.
            url = h.urlquote(
                url + c.forum.shortname
                if getattr(c, 'forum', None) and c.forum else url)
            l.append(
                SitemapEntry('Create Topic', url, ui_icon=g.icons['add']))
            if has_access(c.app, 'configure')():
                l.append(SitemapEntry('Add Forum', c.app.url + 'new_forum',
                                      ui_icon=g.icons['conversation']))
                l.append(SitemapEntry('Admin Forums',
                                      c.project.url() + 'admin/' +
                                      self.config.options.mount_point +
                                      '/forums',
                                      ui_icon=g.icons['admin']))
            if moderate_link:
                l.append(moderate_link)
            # if we are in a thread and not anonymous, provide placeholder
            # links to use in js
            if '/thread/' in request.url and c.user not in (None, M.User.anonymous()):
                l.append(SitemapEntry(
                    'Mark as Spam', 'flag_as_spam',
                    ui_icon=g.icons['flag'],
                    className='sidebar_thread_spam'))
            l.append(SitemapEntry('Stats Graph', c.app.url + 'stats',
                                  ui_icon=g.icons['stats']))
            if forum_links:
                l.append(SitemapEntry('Forums'))
                l = l + forum_links
            l.append(SitemapEntry('Help'))
            l.append(
                SitemapEntry('Formatting Help', c.app.url + 'markdown_syntax'))
            return l
        except:  # pragma no cover
            # Sidebar failures must never break the page; log and degrade.
            log.exception('sidebar_menu')
            return []

    def install(self, project):
        'Set up any default permissions and roles here'
        # Don't call super install here, as that sets up discussion for a tool
        # Setup permissions
        role_admin = M.ProjectRole.by_name('Admin')._id
        role_developer = M.ProjectRole.by_name('Developer')._id
        role_auth = M.ProjectRole.by_name('*authenticated')._id
        role_anon = M.ProjectRole.by_name('*anonymous')._id
        self.config.acl = [
            M.ACE.allow(role_anon, 'read'),
            M.ACE.allow(role_auth, 'post'),
            M.ACE.allow(role_auth, 'unmoderated_post'),
            M.ACE.allow(role_developer, 'moderate'),
            M.ACE.allow(role_admin, 'configure'),
            M.ACE.allow(role_admin, 'admin'),
        ]
        # Every new install gets one default forum.
        utils.create_forum(self, new_forum=dict(
            shortname='general',
            create='on',
            name='General Discussion',
            description='Forum about anything you want to talk about.',
            parent='',
            members_only=False,
            anon_posts=False,
            monitoring_email=None))

    def uninstall(self, project):
        "Remove all the tool's artifacts from the database"
        DM.Forum.query.remove(dict(app_config_id=self.config._id))
        DM.ForumThread.query.remove(dict(app_config_id=self.config._id))
        DM.ForumPost.query.remove(dict(app_config_id=self.config._id))
        super(ForgeDiscussionApp, self).uninstall(project)

    def bulk_export(self, f, export_path='', with_attachments=False):
        """Stream every forum in this app to ``f`` as {"forums": [...]} JSON."""
        f.write('{"forums": [')
        forums = list(DM.Forum.query.find(dict(app_config_id=self.config._id)))
        if with_attachments:
            GenericJSON = JSONForExport
            for forum in forums:
                self.export_attachments(forum.threads, export_path)
        else:
            GenericJSON = jsonify.GenericJSON
        for i, forum in enumerate(forums):
            if i > 0:
                f.write(',')
            json.dump(forum, f, cls=GenericJSON, indent=2)
        f.write(']}')

    def export_attachments(self, threads, export_path):
        # Save attachments of every moderated-ok post under export_path.
        for thread in threads:
            for post in thread.query_posts(status='ok'):
                post_path = self.get_attachment_export_path(
                    export_path,
                    str(thread.artifact._id),
                    thread._id,
                    post.slug
                )
                self.save_attachments(post_path, post.attachments)
from ming.base import Object
from ming.utils import LazyProperty
from ming.orm import mapper, session

from allura.lib import utils
from allura.lib import helpers as h

from .auth import User
from .session import main_doc_session, project_doc_session
from .session import repository_orm_session

log = logging.getLogger(__name__)

# Some schema types
SUser = dict(name=str, email=str, date=datetime)
SObjType = S.OneOf('blob', 'tree', 'submodule')

# Used for when we're going to batch queries using $in
QSIZE = 100
# Raw string: '\.' is an invalid escape sequence in a plain str literal
# (SyntaxWarning on modern CPython).  The compiled pattern is unchanged.
README_RE = re.compile(r'^README(\.[^.]*)?$', re.IGNORECASE)
VIEWABLE_EXTENSIONS = [
    '.php', '.py', '.js', '.java', '.html', '.htm', '.yaml', '.sh',
    '.rb', '.phtml', '.txt', '.bat', '.ps1', '.xhtml', '.css', '.cfm',
    '.jsp', '.jspx', '.pl', '.php4', '.php3', '.rhtml', '.svg',
    '.markdown', '.json', '.ini', '.tcl', '.vbs', '.xsl'
]
DIFF_SIMILARITY_THRESHOLD = .5  # used for determining file renames

# Basic commit information
# One of these for each commit in the physical repo on disk. The _id is the
class BlogPost(M.VersionedArtifact, ActivityObject):
    """A versioned blog post artifact with draft/published workflow."""

    class __mongometa__:
        name = 'blog_post'
        history_class = BlogPostSnapshot
        unique_indexes = [('project_id', 'app_config_id', 'slug')]

    type_s = 'Blog Post'

    title = FieldProperty(str, if_missing='Untitled')
    text = FieldProperty(str, if_missing='')
    timestamp = FieldProperty(datetime, if_missing=datetime.utcnow)
    slug = FieldProperty(str)
    state = FieldProperty(schema.OneOf('draft', 'published'),
                          if_missing='draft')
    neighborhood_id = ForeignIdProperty('Neighborhood', if_missing=None)

    @property
    def activity_name(self):
        return 'blog post %s' % self.title

    def author(self):
        '''The author of the first snapshot of this BlogPost'''
        return M.User.query.get(
            _id=self.get_version(1).author.id) or M.User.anonymous()

    # date/time expose the two halves of `timestamp` as separate
    # read/write properties; setting either recombines with the other half.
    def _get_date(self):
        return self.timestamp.date()

    def _set_date(self, value):
        self.timestamp = datetime.combine(value, self.time)
    date = property(_get_date, _set_date)

    def _get_time(self):
        return self.timestamp.time()

    def _set_time(self, value):
        self.timestamp = datetime.combine(self.date, value)
    time = property(_get_time, _set_time)

    @property
    def html_text(self):
        return g.markdown.convert(self.text)

    @property
    def html_text_preview(self):
        """Return an html preview of the BlogPost text.

        Truncation happens at paragraph boundaries to avoid chopping
        markdown in inappropriate places.

        If the entire post is one paragraph, the full text is returned.
        If the entire text is <= 400 chars, the full text is returned.
        Else, at least 400 chars are returned, rounding up to the nearest
        whole paragraph.

        If truncation occurs, a hyperlink to the full text is appended.
        """
        # Splitting on spaces or single lines breaks isn't sufficient as some
        # markup can span spaces and single line breaks. Converting to HTML
        # first and *then* truncating doesn't work either, because the
        # ellipsis tag ends up orphaned from the main text.
        ellipsis = '... [read more](%s)' % self.url()
        paragraphs = self.text.replace('\r', '').split('\n\n')
        total_length = 0
        for i, p in enumerate(paragraphs):
            total_length += len(p)
            if total_length >= 400:
                break
        text = '\n\n'.join(paragraphs[:i + 1])
        return g.markdown.convert(
            text + (ellipsis if i + 1 < len(paragraphs) else ''))

    @property
    def email_address(self):
        # Reverse the app URL path segments into a dotted mail domain.
        domain = '.'.join(reversed(self.app.url[1:-1].split('/'))).replace(
            '_', '-')
        return '%s@%s%s' % (self.title.replace(
            '/', '.'), domain, config.common_suffix)

    @staticmethod
    def make_base_slug(title, timestamp):
        # 'YYYY/MM/<slug>' with only alphanumerics and dashes kept.
        slugsafe = ''.join(
            ch.lower()
            for ch in title.replace(' ', '-')
            if ch.isalnum() or ch == '-')
        return '%s/%s' % (timestamp.strftime('%Y/%m'), slugsafe)

    def make_slug(self):
        """Assign a unique slug, retrying with a random suffix on collision."""
        base = BlogPost.make_base_slug(self.title, self.timestamp)
        self.slug = base
        while True:
            try:
                session(self).insert_now(self, state(self))
                return self.slug
            except DuplicateKeyError:
                self.slug = base + '-%.3d' % randint(0, 999)

    def url(self):
        return self.app.url + self.slug + '/'

    def shorthand_id(self):
        return self.slug

    def index(self):
        result = super(BlogPost, self).index()
        result.update(title_s=self.slug,
                      type_s=self.type_s,
                      state_s=self.state,
                      snippet_s='%s: %s' % (
                          self.title,
                          h.text.truncate(self.text, 200)),
                      text=self.text)
        return result

    def get_version(self, version):
        HC = self.__mongometa__.history_class
        return HC.query.find({
            'artifact_id': self._id,
            'version': int(version)
        }).one()

    def commit(self):
        """Persist this revision and emit activity/feed/notification events."""
        activity = functools.partial(g.director.create_activity, c.user,
                                     target=c.project)
        self.subscribe()
        super(BlogPost, self).commit()
        if self.version > 1:
            # Diff against the previous snapshot for the notification body.
            v1 = self.get_version(self.version - 1)
            v2 = self
            la = [line + '\n' for line in v1.text.splitlines()]
            lb = [line + '\n' for line in v2.text.splitlines()]
            diff = ''.join(
                difflib.unified_diff(
                    la, lb,
                    'v%d' % v1.version,
                    'v%d' % v2.version))
            description = diff
            if v1.state != 'published' and v2.state == 'published':
                # Draft -> published transition counts as creation.
                activity('created', self)
                M.Feed.post(self, self.title, self.text, author=self.author())
                description = self.text
                subject = '%s created post %s' % (
                    c.user.username, self.title)
            elif v1.title != v2.title:
                activity('renamed', self)
                # NOTE(review): argument order looks swapped — this renders
                # "renamed post <new> to <old>"; confirm intent.
                subject = '%s renamed post %s to %s' % (
                    c.user.username, v2.title, v1.title)
            else:
                activity('modified', self)
                subject = '%s modified post %s' % (
                    c.user.username, self.title)
        else:
            description = self.text
            subject = '%s created post %s' % (
                c.user.username, self.title)
            if self.state == 'published':
                activity('created', self)
                M.Feed.post(self, self.title, self.text, author=self.author())
        if self.state == 'published':
            M.Notification.post(
                artifact=self, topic='metadata',
                text=description, subject=subject)
class MonQTask(MappedClass):
    '''Task to be executed by the taskd daemon.

    Properties

        - _id - bson.ObjectId() for this task
        - state - 'ready', 'busy', 'error', 'complete', or 'skipped' task status
        - priority - integer priority, higher is more priority
        - result_type - either 'keep' or 'forget', what to do with the
          task when it's done
        - time_queue - time the task was queued
        - time_start - time taskd began working on the task
        - time_stop - time taskd stopped working on the task
        - task_name - full dotted name of the task function to run
        - process - identifier for which taskd process is working on the task
        - context - values used to set c.project, c.app, c.user for the task
        - args - ``*args`` to be sent to the task function
        - kwargs - ``**kwargs`` to be sent to the task function
        - result - if the task is complete, the return value. If in error,
          the traceback.
    '''
    states = ('ready', 'busy', 'error', 'complete', 'skipped')
    result_types = ('keep', 'forget')

    class __mongometa__:
        session = task_orm_session
        name = str('monq_task')
        indexes = [
            [
                # used in MonQTask.get() method
                # also 'state' queries exist in several other methods
                ('state', ming.ASCENDING),
                ('priority', ming.DESCENDING),
                ('time_queue', ming.ASCENDING)
            ],
            [
                # used by repo tarball status check, etc
                'state', 'task_name', 'time_queue'
            ],
        ]

    _id = FieldProperty(S.ObjectId)
    state = FieldProperty(S.OneOf(*states))
    priority = FieldProperty(int)
    result_type = FieldProperty(S.OneOf(*result_types))
    time_queue = FieldProperty(datetime, if_missing=datetime.utcnow)
    time_start = FieldProperty(datetime, if_missing=None)
    time_stop = FieldProperty(datetime, if_missing=None)
    task_name = FieldProperty(str)
    process = FieldProperty(str)
    context = FieldProperty(dict(
        project_id=S.ObjectId,
        app_config_id=S.ObjectId,
        user_id=S.ObjectId,
        notifications_disabled=bool))
    args = FieldProperty([])
    kwargs = FieldProperty({None: None})
    result = FieldProperty(None, if_missing=None)

    # Dispatch order: highest priority first, then oldest queued.
    sort = [
        ('priority', ming.DESCENDING),
        ('time_queue', ming.ASCENDING),
    ]

    def __repr__(self):
        from allura import model as M
        project = M.Project.query.get(_id=self.context.project_id)
        app = None
        if project:
            app_config = M.AppConfig.query.get(_id=self.context.app_config_id)
            if app_config:
                app = project.app_instance(app_config)
        user = M.User.query.get(_id=self.context.user_id)
        project_url = project and project.url() or None
        app_mount = app and app.config.options.mount_point or None
        username = user and user.username or None
        return '<%s %s (%s) P:%d %s %s project:%s app:%s user:%s>' % (
            self.__class__.__name__,
            self._id,
            self.state,
            self.priority,
            self.task_name,
            self.process,
            project_url,
            app_mount,
            username)

    @LazyProperty
    def function(self):
        '''The function that is called by this task'''
        smod, sfunc = self.task_name.rsplit('.', 1)
        cur = __import__(smod, fromlist=[sfunc])
        return getattr(cur, sfunc)

    @classmethod
    def post(cls,
             function,
             args=None,
             kwargs=None,
             result_type='forget',
             priority=10,
             delay=0,
             flush_immediately=True,
             ):
        '''Create a new task object based on the current context.'''
        if args is None:
            args = ()
        if kwargs is None:
            kwargs = {}
        task_name = '%s.%s' % (
            function.__module__,
            function.__name__)
        # Capture the current request context so taskd can restore it.
        context = dict(
            project_id=None,
            app_config_id=None,
            user_id=None,
            notifications_disabled=False)
        if getattr(c, 'project', None):
            context['project_id'] = c.project._id
            context[
                'notifications_disabled'] = c.project.notifications_disabled
        if getattr(c, 'app', None):
            context['app_config_id'] = c.app.config._id
        if getattr(c, 'user', None):
            context['user_id'] = c.user._id
        obj = cls(
            state='ready',
            priority=priority,
            result_type=result_type,
            task_name=task_name,
            args=args,
            kwargs=kwargs,
            process=None,
            result=None,
            context=context,
            # delay > 0 makes the task invisible to get() until it is due.
            time_queue=datetime.utcnow() + timedelta(seconds=delay))
        if flush_immediately:
            session(obj).flush(obj)
        return obj

    @classmethod
    def get(cls, process='worker', state='ready', waitfunc=None, only=None):
        '''Get the highest-priority, oldest, ready task and lock it to the
        current process.

        If no task is available and waitfunc is supplied, call the waitfunc
        before trying to get the task again.  If waitfunc is None and no
        tasks are available, return None.  If waitfunc raises a
        StopIteration, stop waiting for a task
        '''
        while True:
            try:
                query = dict(state=state)
                # Skip tasks scheduled for the future (see delay in post()).
                query['time_queue'] = {'$lte': datetime.utcnow()}
                if only:
                    query['task_name'] = {'$in': only}
                # find_and_modify claims the task atomically for `process`.
                obj = cls.query.find_and_modify(
                    query=query,
                    update={
                        '$set': dict(
                            state='busy',
                            process=process)
                    },
                    new=True,
                    sort=cls.sort)
                if obj is not None:
                    return obj
            except pymongo.errors.OperationFailure as exc:
                if 'No matching object found' not in exc.args[0]:
                    raise
            if waitfunc is None:
                return None
            try:
                waitfunc()
            except StopIteration:
                return None

    @classmethod
    def run_ready(cls, worker=None):
        '''Run all the tasks that are currently ready'''
        # NOTE(review): returns the index of the last task run (count - 1),
        # and 0 when no tasks exist — confirm callers expect this.
        i = 0
        for i, task in enumerate(cls.query.find(dict(state='ready')).sort(cls.sort).all()):
            task.process = worker
            task()
        return i

    def __call__(self, restore_context=True, nocapture=False):
        '''Call the task function with its context.  If restore_context is
        True, c.project/app/user will be restored to the values they had
        before this function was called.
        '''
        from allura import model as M
        self.time_start = datetime.utcnow()
        session(self).flush(self)
        log.info('starting %r', self)
        # Save the caller's context so it can be restored afterwards.
        old_cproject = getattr(c, 'project', None)
        old_capp = getattr(c, 'app', None)
        old_cuser = getattr(c, 'user', None)
        try:
            func = self.function
            c.project = M.Project.query.get(_id=self.context.project_id)
            c.app = None
            if c.project:
                c.project.notifications_disabled = self.context.get(
                    'notifications_disabled', False)
                app_config = M.AppConfig.query.get(
                    _id=self.context.app_config_id)
                if app_config:
                    c.app = c.project.app_instance(app_config)
            c.user = M.User.query.get(_id=self.context.user_id)
            with null_contextmanager() if nocapture else log_output(log):
                self.result = func(*self.args, **self.kwargs)
            self.state = 'complete'
            return self.result
        except Exception as exc:
            # monq.raise_errors is a debugging aid: propagate instead of
            # recording the failure on the task document.
            if asbool(config.get('monq.raise_errors')):
                raise
            else:
                log.exception('Error "%s" on job %s', exc, self)
                self.state = 'error'
                if hasattr(exc, 'format_error'):
                    self.result = exc.format_error()
                    log.error(self.result)
                else:
                    self.result = traceback.format_exc()
        finally:
            self.time_stop = datetime.utcnow()
            session(self).flush(self)
            if restore_context:
                c.project = old_cproject
                c.app = old_capp
                c.user = old_cuser

    def join(self, poll_interval=0.1):
        '''Wait until this task is either complete or errors out, then return
        the result.'''
        while self.state not in ('complete', 'error'):
            time.sleep(poll_interval)
            # Re-fetch with refresh=True so we see taskd's state changes.
            self.query.find(dict(_id=self._id), refresh=True).first()
        return self.result

    @classmethod
    def list(cls, state='ready'):
        '''Print all tasks of a certain status to sys.stdout.  Used for
        debugging.'''
        for t in cls.query.find(dict(state=state)):
            sys.stdout.write('%r\n' % t)
class User(MappedClass, ActivityNode, ActivityObject):
    """A registered user account, with preferences, contacts and skills."""

    SALT_LEN = 8

    class __mongometa__:
        name = 'user'
        session = main_orm_session
        indexes = ['tool_data.sfx.userid']
        unique_indexes = ['username']

    _id = FieldProperty(S.ObjectId)
    sfx_userid = FieldProperty(S.Deprecated)
    username = FieldProperty(str)
    open_ids = FieldProperty([str])
    email_addresses = FieldProperty([str])
    password = FieldProperty(str)
    projects = FieldProperty(S.Deprecated)
    # full mount point: prefs dict
    tool_preferences = FieldProperty({str: {str: None}})
    tool_data = FieldProperty({str: {str: None}})  # entry point: prefs dict
    display_name = FieldProperty(str)
    disabled = FieldProperty(bool, if_missing=False)

    # Don't use directly, use get/set_pref() instead
    preferences = FieldProperty(
        dict(results_per_page=int,
             email_address=str,
             email_format=str))

    # Personal data
    sex = FieldProperty(
        S.OneOf('Male', 'Female', 'Other', 'Unknown', if_missing='Unknown'))
    birthdate = FieldProperty(S.DateTime, if_missing=None)

    # Availability information
    availability = FieldProperty([
        dict(week_day=str,
             start_time=dict(h=int, m=int),
             end_time=dict(h=int, m=int))
    ])
    localization = FieldProperty(dict(city=str, country=str))
    timezone = FieldProperty(str)
    inactiveperiod = FieldProperty(
        [dict(start_date=S.DateTime, end_date=S.DateTime)])

    # Additional contacts
    socialnetworks = FieldProperty([dict(socialnetwork=str, accounturl=str)])
    telnumbers = FieldProperty([str])
    skypeaccount = FieldProperty(str)
    webpages = FieldProperty([str])

    # Skills list
    skills = FieldProperty([
        dict(category_id=S.ObjectId,
             level=S.OneOf('low', 'high', 'medium'),
             comment=str)
    ])

    # Statistics
    stats_id = FieldProperty(S.ObjectId, if_missing=None)

    @property
    def activity_name(self):
        return self.display_name or self.username

    @property
    def stats(self):
        # Stats are only available when the userstats tool is installed.
        if 'userstats' in g.entry_points['stats']:
            from forgeuserstats.model.stats import UserStats
            if self.stats_id:
                return UserStats.query.get(_id=self.stats_id)
            return UserStats.create(self)
        else:
            return None

    def get_pref(self, pref_name):
        return plugin.UserPreferencesProvider.get().get_pref(self, pref_name)

    def set_pref(self, pref_name, pref_value):
        return plugin.UserPreferencesProvider.get().set_pref(
            self, pref_name, pref_value)

    def add_socialnetwork(self, socialnetwork, accounturl):
        # Allow bare Twitter handles ('@name' or 'name') as a convenience.
        if socialnetwork == 'Twitter' and not accounturl.startswith('http'):
            accounturl = 'http://twitter.com/%s' % accounturl.replace('@', '')
        self.socialnetworks.append(
            dict(socialnetwork=socialnetwork, accounturl=accounturl))

    def remove_socialnetwork(self, socialnetwork, oldurl):
        for el in self.socialnetworks:
            if el.socialnetwork == socialnetwork and el.accounturl == oldurl:
                del self.socialnetworks[self.socialnetworks.index(el)]
                return

    def add_telephonenumber(self, telnumber):
        self.telnumbers.append(telnumber)

    def remove_telephonenumber(self, oldvalue):
        for el in self.telnumbers:
            if el == oldvalue:
                del self.telnumbers[self.telnumbers.index(el)]
                return

    def add_webpage(self, webpage):
        self.webpages.append(webpage)

    def remove_webpage(self, oldvalue):
        for el in self.webpages:
            if el == oldvalue:
                del self.webpages[self.webpages.index(el)]
                return

    def add_timeslot(self, weekday, starttime, endtime):
        self.availability.append(
            dict(week_day=weekday, start_time=starttime, end_time=endtime))

    def remove_timeslot(self, weekday, starttime, endtime):
        oldel = dict(week_day=weekday, start_time=starttime, end_time=endtime)
        for el in self.availability:
            if el == oldel:
                del self.availability[self.availability.index(el)]
                return

    def add_inactive_period(self, startdate, enddate):
        self.inactiveperiod.append(dict(start_date=startdate,
                                        end_date=enddate))

    def remove_inactive_period(self, startdate, enddate):
        oldel = dict(start_date=startdate, end_date=enddate)
        for el in self.inactiveperiod:
            if el == oldel:
                del self.inactiveperiod[self.inactiveperiod.index(el)]
                return

    def get_localized_availability(self, tz_name):
        """Return availability timeslots converted to timezone ``tz_name``.

        Slots that cross midnight after conversion are split into two
        entries; the result is sorted by (week day, start time).
        """
        week_day = [
            'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
            'Saturday', 'Sunday'
        ]
        avail = self.get_availability_timeslots()
        usertimezone = timezone(self.get_pref('timezone'))
        chosentimezone = timezone(tz_name)
        retlist = []
        for t in avail:
            today = datetime.today()
            start = datetime(today.year, today.month, today.day,
                             t['start_time'].hour, t['start_time'].minute, 0)
            end = datetime(today.year, today.month, today.day,
                           t['end_time'].hour, t['end_time'].minute, 0)
            loctime1 = usertimezone.localize(start)
            loctime2 = usertimezone.localize(end)
            convtime1 = loctime1.astimezone(chosentimezone)
            convtime2 = loctime2.astimezone(chosentimezone)
            dif_days_start = convtime1.weekday() - today.weekday()
            dif_days_end = convtime2.weekday() - today.weekday()
            index = (week_day.index(t['week_day']) + dif_days_start) % 7
            week_day_start = week_day[index]
            # NOTE(review): week_day_end duplicates week_day_start and
            # dif_days_end is unused, so the midnight-crossing branch below
            # is unreachable — confirm whether the end day should be derived
            # from dif_days_end instead.
            week_day_end = week_day[index]
            if week_day_start == week_day_end:
                retlist.append(
                    dict(week_day=week_day_start,
                         start_time=convtime1.time(),
                         end_time=convtime2.time()))
            else:
                retlist.append(
                    dict(week_day=week_day_start,
                         start_time=convtime1.time(),
                         end_time=time(23, 59)))
                retlist.append(
                    dict(week_day=week_day_end,
                         start_time=time(0, 0),
                         end_time=convtime2.time()))
        return sorted(retlist,
                      key=lambda k: (week_day.index(k['week_day']),
                                     k['start_time']))

    def get_skills(self):
        """Return skills with category ids resolved to TroveCategory objects."""
        from allura.model.project import TroveCategory
        retval = []
        for el in self.skills:
            d = dict(skill=TroveCategory.query.get(_id=el["category_id"]),
                     level=el.level,
                     comment=el.comment)
            retval.append(d)
        return retval

    def get_availability_timeslots(self):
        """Return availability entries with {h, m} dicts turned into time()."""
        retval = []
        for el in self.availability:
            start, end = (el.get('start_time'), el.get('end_time'))
            (starth, startm) = (start.get('h'), start.get('m'))
            (endh, endm) = (end.get('h'), end.get('m'))
            newdict = dict(week_day=el.get('week_day'),
                           start_time=time(starth, startm, 0),
                           end_time=time(endh, endm, 0))
            retval.append(newdict)
        return retval

    def get_inactive_periods(self, include_past_periods=False):
        retval = []
        for el in self.inactiveperiod:
            d1, d2 = (el.get('start_date'), el.get('end_date'))
            newdict = dict(start_date=d1, end_date=d2)
            if include_past_periods or newdict['end_date'] > datetime.today():
                retval.append(newdict)
        return retval

    def url(self):
        return '/%s/' % plugin.AuthenticationProvider.get(
            request).user_project_shortname(self)

    @memoize
    def icon_url(self):
        """URL of the user's icon, falling back to gravatar, else None."""
        icon_url = None
        try:
            private_project = self.private_project()
        except Exception:
            # Best-effort: a broken user-project must not break icon display.
            log.warn('Error getting/creating user-project for %s',
                     self.username, exc_info=True)
            private_project = None
        if private_project and private_project.icon:
            icon_url = self.url() + 'user_icon'
        elif self.preferences.email_address:
            icon_url = g.gravatar(self.preferences.email_address)
        return icon_url

    @classmethod
    def upsert(cls, username):
        """Return the user with ``username``, creating it if missing."""
        u = cls.query.get(username=username)
        if u is not None:
            return u
        try:
            u = cls(username=username)
            session(u).flush(u)
        except pymongo.errors.DuplicateKeyError:
            # Lost a race with a concurrent insert; use the winner's record.
            session(u).expunge(u)
            u = cls.query.get(username=username)
        return u

    @classmethod
    def by_email_address(cls, addr):
        ea = EmailAddress.query.get(_id=addr)
        if ea is None:
            return None
        return ea.claimed_by_user()

    @classmethod
    def by_username(cls, name):
        if not name:
            return cls.anonymous()
        user = cls.query.get(username=name)
        if user:
            return user
        return plugin.AuthenticationProvider.get(request).by_username(name)

    @classmethod
    def by_display_name(cls, name):
        return plugin.UserPreferencesProvider.get().find_by_display_name(name)

    def get_tool_data(self, tool, key, default=None):
        # Bug fix: the ``default`` argument was previously ignored (literal
        # None was passed), so callers supplying a default always got None.
        return self.tool_data.get(tool, {}).get(key, default)

    def set_tool_data(self, tool, **kw):
        d = self.tool_data.setdefault(tool, {})
        d.update(kw)
        # Mark the document dirty so ming persists the nested mutation.
        state(self).soil()

    def address_object(self, addr):
        return EmailAddress.query.get(_id=addr, claimed_by_user_id=self._id)

    def openid_object(self, oid):
        return OpenId.query.get(_id=oid, claimed_by_user_id=self._id)

    def claim_openid(self, oid_url):
        oid_obj = OpenId.upsert(oid_url, self.get_pref('display_name'))
        oid_obj.claimed_by_user_id = self._id
        if oid_url in self.open_ids:
            return
        self.open_ids.append(oid_url)

    def claim_address(self, email_address):
        addr = EmailAddress.canonical(email_address)
        email_addr = EmailAddress.upsert(addr)
        email_addr.claimed_by_user_id = self._id
        if addr in self.email_addresses:
            return
        self.email_addresses.append(addr)

    def claim_only_addresses(self, *addresses):
        '''Claims the listed addresses and no others, setting the confirmed
        attribute to True on all.
        '''
        self.email_addresses = [
            EmailAddress.canonical(a) for a in addresses]
        addresses = set(self.email_addresses)
        for addr in EmailAddress.query.find(
                dict(claimed_by_user_id=self._id)):
            if addr._id in addresses:
                if not addr.confirmed:
                    addr.confirmed = True
                addresses.remove(addr._id)
            else:
                addr.delete()
        for a in addresses:
            addr = EmailAddress.upsert(a)
            addr.claimed_by_user_id = self._id
            addr.confirmed = True

    @classmethod
    def register(cls, doc, make_project=True):
        """Create a user via the auth provider; optionally make its u/ project."""
        from allura import model as M
        auth_provider = plugin.AuthenticationProvider.get(request)
        user = auth_provider.register_user(doc)
        if user and 'display_name' in doc:
            user.set_pref('display_name', doc['display_name'])
        if user:
            g.statsUpdater.newUser(user)
        if user and make_project:
            n = M.Neighborhood.query.get(name='Users')
            n.register_project(auth_provider.user_project_shortname(user),
                               user=user, user_project=True)
        return user

    @LazyProperty
    def neighborhood(self):
        from allura import model as M
        return M.Neighborhood.query.get(name='Users')

    def private_project(self):
        '''
        Returns the personal user-project for the user
        '''
        if self.disabled:
            return None

        from allura import model as M
        n = self.neighborhood
        auth_provider = plugin.AuthenticationProvider.get(request)
        project_shortname = auth_provider.user_project_shortname(self)
        p = M.Project.query.get(shortname=project_shortname,
                                neighborhood_id=n._id)
        if p and p.deleted:
            # really delete it, since registering a new project would
            # conflict with the "deleted" one
            log.info(
                'completely deleting user project '
                '(was already flagged as deleted) %s',
                project_shortname)
            p.delete()
            ThreadLocalORMSession.flush_all()
            p = None
        if not p and not self.is_anonymous():
            # create user-project on demand if it is missing
            p = n.register_project(project_shortname,
                                   user=self, user_project=True)
        return p

    @property
    def script_name(self):
        return '/u/' + self.username + '/'

    def my_projects(self, role_name=None):
        """Return projects to which this user belongs.

        If ``role_name`` is provided, return only projects for which user
        has that role.
        """
        if self.is_anonymous():
            return
        reaching_role_ids = list(
            g.credentials.user_roles(user_id=self._id).reaching_ids_set)
        reaching_roles = ProjectRole.query.find({
            '_id': {'$in': reaching_role_ids}
        }).all()
        if not role_name:
            named_roles = [
                r for r in reaching_roles
                if r.name and r.project and not r.project.deleted
            ]
        else:
            named_roles = [
                r for r in reaching_roles
                if r.name == role_name and r.project
                and not r.project.deleted
            ]
        # Yield each project once even if reached via several roles.
        seen_project_ids = set()
        for r in named_roles:
            if r.project_id in seen_project_ids:
                continue
            seen_project_ids.add(r.project_id)
            yield r.project

    def project_role(self, project=None):
        if project is None:
            project = c.project
        if self.is_anonymous():
            return ProjectRole.anonymous(project)
        else:
            return ProjectRole.upsert(user_id=self._id,
                                      project_id=project.root_project._id)

    def set_password(self, new_password):
        return plugin.AuthenticationProvider.get(request).set_password(
            self, self.password, new_password)

    @classmethod
    def anonymous(cls):
        return User.query.get(_id=None)

    def is_anonymous(self):
        return self._id is None or self.username == ''

    def email_address_header(self):
        # Local renamed from ``h`` to avoid shadowing the helpers module
        # alias ``h`` used elsewhere in this class (e.g. __json__).
        hdr = header.Header()
        hdr.append(u'"%s" ' % self.get_pref('display_name'))
        hdr.append(u'<%s>' % self.get_pref('email_address'))
        return hdr

    def update_notifications(self):
        return plugin.AuthenticationProvider.get(request).update_notifications(
            self)

    @classmethod
    def withskill(cls, skill):
        return cls.query.find({"skills.category_id": skill._id})

    def __json__(self):
        return dict(
            username=self.username,
            name=self.display_name,
            url=h.absurl(self.url()),
        )
class MonQTask(MappedClass):
    '''Task to be executed by the taskd daemon.

    Properties

    - _id - bson.ObjectId() for this task
    - state - 'ready', 'busy', 'error', or 'complete' task status
    - priority - integer priority, higher is more priority
    - result_type - either 'keep' or 'forget', what to do with the task when
      it's done
    - time_queue - time the task was queued
    - time_start - time taskd began working on the task
    - time_stop - time taskd stopped working on the task
    - task_name - full dotted name of the task function to run
    - process - identifier for which taskd process is working on the task
    - context - values used to set c.project, c.app, c.user for the task
    - args - *args to be sent to the task function
    - kwargs - **kwargs to be sent to the task function
    - result - if the task is complete, the return value. If in error, the
      traceback.
    '''
    states = ('ready', 'busy', 'error', 'complete')
    result_types = ('keep', 'forget')

    class __mongometa__:
        session = main_orm_session
        name = 'monq_task'
        indexes = [
            [
                # used in MonQTask.get() method
                # also 'state' queries exist in several other methods
                ('state', ming.ASCENDING),
                ('priority', ming.DESCENDING),
                ('time_queue', ming.ASCENDING)
            ],
            [
                # used by SF internal tool, but could be generally useful to
                # have an index on task_name
                'state', 'task_name', 'time_queue'
            ],
        ]

    _id = FieldProperty(S.ObjectId)
    state = FieldProperty(S.OneOf(*states))
    priority = FieldProperty(int)
    result_type = FieldProperty(S.OneOf(*result_types))
    time_queue = FieldProperty(datetime, if_missing=datetime.utcnow)
    time_start = FieldProperty(datetime, if_missing=None)
    time_stop = FieldProperty(datetime, if_missing=None)
    task_name = FieldProperty(str)
    process = FieldProperty(str)
    # Snapshot of the request context the task should run under.
    context = FieldProperty(dict(
        project_id=S.ObjectId,
        app_config_id=S.ObjectId,
        user_id=S.ObjectId))
    args = FieldProperty([])
    kwargs = FieldProperty({None: None})
    result = FieldProperty(None, if_missing=None)

    def __repr__(self):
        # Human-readable summary used in logs; resolves context ids to
        # project/app/user names where possible (any of them may be missing).
        from allura import model as M
        project = M.Project.query.get(_id=self.context.project_id)
        app = None
        if project:
            app_config = M.AppConfig.query.get(_id=self.context.app_config_id)
            if app_config:
                app = project.app_instance(app_config)
        user = M.User.query.get(_id=self.context.user_id)
        project_url = project and project.url() or None
        app_mount = app and app.config.options.mount_point or None
        username = user and user.username or None
        return '<%s %s (%s) P:%d %s %s project:%s app:%s user:%s>' % (
            self.__class__.__name__,
            self._id,
            self.state,
            self.priority,
            self.task_name,
            self.process,
            project_url,
            app_mount,
            username)

    @LazyProperty
    def function(self):
        '''The function that is called by this task'''
        smod, sfunc = self.task_name.rsplit('.', 1)
        cur = __import__(smod, fromlist=[sfunc])
        return getattr(cur, sfunc)

    @classmethod
    def post(cls,
             function,
             args=None,
             kwargs=None,
             result_type='forget',
             priority=10):
        '''Create a new task object based on the current context.'''
        if args is None:
            args = ()
        if kwargs is None:
            kwargs = {}
        task_name = '%s.%s' % (function.__module__, function.__name__)
        # Capture whatever request context is available so taskd can
        # reconstruct c.project / c.app / c.user when it runs the task.
        context = dict(project_id=None, app_config_id=None, user_id=None)
        if getattr(c, 'project', None):
            context['project_id'] = c.project._id
        if getattr(c, 'app', None):
            context['app_config_id'] = c.app.config._id
        if getattr(c, 'user', None):
            context['user_id'] = c.user._id
        obj = cls(state='ready',
                  priority=priority,
                  result_type=result_type,
                  task_name=task_name,
                  args=args,
                  kwargs=kwargs,
                  process=None,
                  result=None,
                  context=context)
        session(obj).flush(obj)
        try:
            # Best-effort wakeup ping for taskd; task is already persisted,
            # so failure here must not propagate.
            if g.amq_conn:
                g.amq_conn.queue.put('')
        except:
            log.warning('Error putting to amq_conn', exc_info=True)
        return obj

    @classmethod
    def get(cls,
            process='worker',
            state='ready',
            waitfunc=None,
            only=None,
            exclude=None):
        '''Get the highest-priority, oldest, ready task and lock it to the
        current process.  If no task is available and waitfunc is supplied,
        call the waitfunc before trying to get the task again.  If waitfunc is
        None and no tasks are available, return None.
        '''
        sort = [('priority', ming.DESCENDING), ('time_queue', ming.ASCENDING)]
        while True:
            try:
                query = dict(state=state)
                if exclude:
                    query['task_name'] = {'$nin': exclude}
                if only:
                    query['task_name'] = {'$in': only}
                # Atomic claim: flip state to 'busy' and stamp our process
                # name in one server-side operation so two workers can never
                # grab the same task.
                obj = cls.query.find_and_modify(
                    query=query,
                    update={'$set': dict(state='busy', process=process)},
                    new=True,
                    sort=sort)
                if obj is not None:
                    return obj
            # Python 2 except syntax; older pymongo raises OperationFailure
            # when find_and_modify matches nothing.
            except pymongo.errors.OperationFailure, exc:
                if 'No matching object found' not in exc.args[0]:
                    raise
            if waitfunc is None:
                return None
            waitfunc()
class ForgeDiscussionApp(Application):
    """Discussion-forum tool: mounts forum controllers, routes inbound
    email to forums and installs default permissions/forums."""
    __version__ = version.__version__
    #installable=False
    permissions = ['configure', 'read', 'unmoderated_post',
                   'post', 'moderate', 'admin']
    config_options = Application.config_options + [
        ConfigOption('PostingPolicy',
                     schema.OneOf('ApproveOnceModerated', 'ModerateAll'),
                     'ApproveOnceModerated')
    ]
    PostClass = DM.ForumPost
    AttachmentClass = DM.ForumAttachment
    searchable = True
    tool_label = 'Discussion'
    default_mount_label = 'Discussion'
    default_mount_point = 'discussion'
    ordinal = 7
    # Icon paths keyed by pixel size.
    icons = {
        24: 'images/forums_24.png',
        32: 'images/forums_32.png',
        48: 'images/forums_48.png'
    }

    def __init__(self, project, config):
        Application.__init__(self, project, config)
        self.root = RootController()
        self.api_root = RootRestController()
        self.admin = ForumAdminController(self)
        self.default_forum_preferences = dict(
            subscriptions={})

    def has_access(self, user, topic):
        # Topics use '.' where forum shortnames use '/'.
        f = DM.Forum.query.get(shortname=topic.replace('.', '/'),
                               app_config_id=self.config._id)
        return has_access(f, 'post', user=user)()

    def handle_message(self, topic, message):
        """Route an inbound (email) message to the forum named by `topic`."""
        log.info('Message from %s (%s)',
                 topic, self.config.options.mount_point)
        log.info('Headers are: %s', message['headers'])
        shortname = urllib.unquote_plus(topic.replace('.', '/'))
        forum = DM.Forum.query.get(
            shortname=shortname, app_config_id=self.config._id)
        if forum is None:
            log.error("Error looking up forum: %r", shortname)
            return
        self.handle_artifact_message(forum, message)

    def main_menu(self):
        '''Apps should provide their entries to be added to the main nav
        :return: a list of :class:`SitemapEntries <allura.app.SitemapEntry>`
        '''
        return [SitemapEntry(
            self.config.options.mount_label.title(), '.')]

    @property
    @h.exceptionless([], log)
    def sitemap(self):
        menu_id = self.config.options.mount_label.title()
        with h.push_config(c, app=self):
            # SitemapEntry.__getitem__ attaches the sidebar entries as
            # children of the top-level entry.
            return [
                SitemapEntry(menu_id, '.')[self.sidebar_menu()]]

    @property
    def forums(self):
        return DM.Forum.query.find(dict(app_config_id=self.config._id)).all()

    @property
    def top_forums(self):
        # Forums with no parent.
        return self.subforums_of(None)

    def subforums_of(self, parent_id):
        return DM.Forum.query.find(dict(
            app_config_id=self.config._id,
            parent_id=parent_id,
        )).all()

    def admin_menu(self):
        admin_url = c.project.url() + 'admin/' + \
            self.config.options.mount_point + '/'
        links = []
        if has_access(self, 'configure')():
            links.append(SitemapEntry('Forums', admin_url + 'forums'))
        links += super(ForgeDiscussionApp, self).admin_menu()
        return links

    def sidebar_menu(self):
        """Build the per-request sidebar; any failure degrades to an
        empty menu rather than breaking the page."""
        try:
            l = []
            moderate_link = None
            forum_links = []
            forums = DM.Forum.query.find(dict(
                app_config_id=c.app.config._id,
                parent_id=None, deleted=False))
            for f in forums:
                if has_access(f, 'read')():
                    if f.url() in request.url and h.has_access(f, 'moderate')():
                        # Badge shows the count of posts awaiting moderation.
                        moderate_link = SitemapEntry(
                            'Moderate', "%smoderate/" % f.url(),
                            ui_icon=g.icons['pencil'],
                            small=DM.ForumPost.query.find({
                                'discussion_id': f._id,
                                'status': {'$ne': 'ok'}}).count())
                    forum_links.append(
                        SitemapEntry(f.name, f.url(), small=f.num_topics))
            l.append(SitemapEntry('Create Topic',
                                  c.app.url + 'create_topic',
                                  ui_icon=g.icons['plus']))
            if has_access(c.app, 'configure')():
                l.append(SitemapEntry('Add Forum',
                                      c.app.url + 'new_forum',
                                      ui_icon=g.icons['conversation']))
                l.append(SitemapEntry(
                    'Admin Forums',
                    c.project.url() + 'admin/' +
                    self.config.options.mount_point + '/forums',
                    ui_icon=g.icons['pencil']))
            if moderate_link:
                l.append(moderate_link)
            # if we are in a thread and not anonymous, provide placeholder
            # links to use in js
            if '/thread/' in request.url and \
                    c.user not in (None, M.User.anonymous()):
                l.append(SitemapEntry(
                    'Mark as Spam', 'flag_as_spam',
                    ui_icon=g.icons['flag'],
                    className='sidebar_thread_spam'))
            if forum_links:
                l.append(SitemapEntry('Forums'))
                l = l + forum_links
            l.append(SitemapEntry('Help'))
            l.append(SitemapEntry('Formatting Help',
                                  c.app.url + 'markdown_syntax'))
            return l
        except:  # pragma no cover
            log.exception('sidebar_menu')
            return []

    def install(self, project):
        'Set up any default permissions and roles here'
        # Don't call super install here, as that sets up discussion for a tool

        # Setup permissions
        role_admin = M.ProjectRole.by_name('Admin')._id
        role_developer = M.ProjectRole.by_name('Developer')._id
        role_auth = M.ProjectRole.by_name('*authenticated')._id
        role_anon = M.ProjectRole.by_name('*anonymous')._id
        self.config.acl = [
            M.ACE.allow(role_anon, 'read'),
            M.ACE.allow(role_auth, 'post'),
            M.ACE.allow(role_auth, 'unmoderated_post'),
            M.ACE.allow(role_developer, 'moderate'),
            M.ACE.allow(role_admin, 'configure'),
            M.ACE.allow(role_admin, 'admin'),
        ]

        # Seed a default forum so the tool is usable immediately.
        utils.create_forum(self, new_forum=dict(
            shortname='general',
            create='on',
            name='General Discussion',
            description='Forum about anything you want to talk about.',
            parent='',
            members_only=False,
            anon_posts=False,
            monitoring_email=None))

    def uninstall(self, project):
        "Remove all the tool's artifacts from the database"
        DM.Forum.query.remove(dict(app_config_id=self.config._id))
        DM.ForumThread.query.remove(dict(app_config_id=self.config._id))
        DM.ForumPost.query.remove(dict(app_config_id=self.config._id))
        super(ForgeDiscussionApp, self).uninstall(project)
class BlogPost(M.VersionedArtifact, ActivityObject):
    """A versioned blog post; drafts are invisible until 'published'."""

    class __mongometa__:
        name = 'blog_post'
        history_class = BlogPostSnapshot
        unique_indexes = [('app_config_id', 'slug')]
        indexes = [
            # for [[project_blog_posts]] macro
            ('app_config_id', 'state', 'timestamp'),
            # for [[neighborhood_blog_posts]] macro
            ('neighborhood_id', 'state', 'timestamp'),
        ]

    type_s = 'Blog Post'

    title = FieldProperty(str, if_missing='Untitled')
    text = FieldProperty(str, if_missing='')
    text_cache = FieldProperty(MarkdownCache)
    timestamp = FieldProperty(datetime, if_missing=datetime.utcnow)
    # slug is 'YYYY/MM/<safe-title>' and unique per app (see make_slug).
    slug = FieldProperty(str)
    state = FieldProperty(schema.OneOf('draft', 'published'),
                          if_missing='draft')
    neighborhood_id = ForeignIdProperty('Neighborhood', if_missing=None)
    link_regex = re.compile(
        r'^[^#]+$')  # no target in the link, meaning no comments

    @property
    def activity_name(self):
        return 'a blog post'

    @property
    def activity_extras(self):
        d = ActivityObject.activity_extras.fget(self)
        d.update(summary=self.title)
        return d

    def author(self):
        '''The author of the first snapshot of this BlogPost'''
        return M.User.query.get(
            _id=self.get_version(1).author.id) or M.User.anonymous()

    # date/time expose the two halves of `timestamp` as writable properties.
    def _get_date(self):
        return self.timestamp.date()

    def _set_date(self, value):
        self.timestamp = datetime.combine(value, self.time)

    date = property(_get_date, _set_date)

    def _get_time(self):
        return self.timestamp.time()

    def _set_time(self, value):
        self.timestamp = datetime.combine(self.date, value)

    time = property(_get_time, _set_time)

    @property
    def html_text(self):
        return g.markdown.cached_convert(self, 'text')

    @property
    def html_text_preview(self):
        """Return an html preview of the BlogPost text.

        Truncation happens at paragraph boundaries to avoid chopping markdown
        in inappropriate places.

        If the entire post is one paragraph, the full text is returned.  If
        the entire text is <= 400 chars, the full text is returned.  Else, at
        least 400 chars are returned, rounding up to the nearest whole
        paragraph.

        If truncation occurs, a hyperlink to the full text is appended.
        """
        # Splitting on spaces or single lines breaks isn't sufficient as some
        # markup can span spaces and single line breaks. Converting to HTML
        # first and *then* truncating doesn't work either, because the
        # ellipsis tag ends up orphaned from the main text.
        ellipsis = '... [read more](%s)' % self.url()
        paragraphs = self.text.replace('\r', '').split('\n\n')
        total_length = 0
        for i, p in enumerate(paragraphs):
            total_length += len(p)
            if total_length >= 400:
                break
        text = '\n\n'.join(paragraphs[:i + 1])
        return g.markdown.convert(
            text + (ellipsis if i + 1 < len(paragraphs) else ''))

    @property
    def email_address(self):
        # Reply-by-email address; a no-reply path is used when email posting
        # is disabled for this tool.
        if self.config.options.get('AllowEmailPosting', True):
            domain = self.email_domain
            return '%s@%s%s' % (self.title.replace('/', '.'),
                                domain, config.common_suffix)
        else:
            return tg_config.get('forgemail.return_path')

    @staticmethod
    def make_base_slug(title, timestamp):
        # Keep only alphanumerics and dashes from the title.
        slugsafe = ''.join(ch.lower()
                           for ch in title.replace(' ', '-')
                           if ch.isalnum() or ch == '-')
        return '%s/%s' % (timestamp.strftime('%Y/%m'), slugsafe)

    def make_slug(self):
        """Assign a unique slug, retrying with a random numeric suffix on
        collision (unique index on app_config_id+slug enforces it)."""
        base = BlogPost.make_base_slug(self.title, self.timestamp)
        self.slug = base
        while True:
            try:
                session(self).insert_now(self, state(self))
                return self.slug
            except DuplicateKeyError:
                self.slug = base + '-%.3d' % randint(0, 999)

    def url(self):
        return self.app.url + self.slug + '/'

    def shorthand_id(self):
        return self.slug

    def index(self):
        result = super(BlogPost, self).index()
        result.update(title=self.title,
                      type_s=self.type_s,
                      state_s=self.state,
                      snippet_s='%s: %s' % (
                          self.title, h.text.truncate(self.text, 200)),
                      text=self.text)
        return result

    def get_version(self, version):
        HC = self.__mongometa__.history_class
        return HC.query.find({
            'artifact_id': self._id,
            'version': int(version)
        }).one()

    def commit(self):
        """Persist a new version and emit the matching activity / feed /
        notification side effects (create vs. rename vs. modify vs.
        unpublish are distinguished by comparing the last two versions)."""
        activity = functools.partial(g.director.create_activity,
                                     c.user,
                                     related_nodes=[c.project],
                                     tags=['blog'])
        self.subscribe()
        super(BlogPost, self).commit()
        if self.version > 1:
            v1 = self.get_version(self.version - 1)
            v2 = self
            la = [line + '\n' for line in v1.text.splitlines()]
            lb = [line + '\n' for line in v2.text.splitlines()]
            diff = ''.join(
                difflib.unified_diff(la, lb,
                                     'v%d' % v1.version,
                                     'v%d' % v2.version))
            description = diff
            if v1.state != 'published' and v2.state == 'published':
                # draft -> published: treat as creation.
                activity('created', self)
                M.Feed.post(self,
                            self.title,
                            self.text,
                            author=self.author(),
                            pubdate=self.get_version(1).timestamp)
                description = self.text
                subject = '%s created post %s' % (c.user.username, self.title)
            elif v2.state == 'published':
                # edit of an already-published post: refresh the feed item.
                feed_item = self.feed_item()
                if feed_item:
                    feed_item.title = self.title
                    feed_item.description = g.markdown.convert(self.text)
                else:
                    M.Feed.post(self,
                                self.title,
                                self.text,
                                author=self.author(),
                                pubdate=self.get_version(1).timestamp)
                if v1.title != v2.title:
                    activity('renamed', self)
                    subject = '%s renamed post %s to %s' % (
                        c.user.username, v1.title, v2.title)
                else:
                    activity('modified', self)
                    subject = '%s modified post %s' % (
                        c.user.username, self.title)
            elif v1.state == 'published' and v2.state == 'draft':
                # unpublish: drop the feed item.
                feed_item = self.feed_item()
                if feed_item:
                    feed_item.delete()
        else:
            description = self.text
            subject = '%s created post %s' % (c.user.username, self.title)
            if self.state == 'published':
                activity('created', self)
                M.Feed.post(self,
                            self.title,
                            self.text,
                            author=self.author(),
                            pubdate=self.timestamp)
        if self.state == 'published':
            M.Notification.post(artifact=self,
                                topic='metadata',
                                text=description,
                                subject=subject)

    @classmethod
    def new(cls, **kw):
        """Create, slug, commit and attach a discussion thread to a post."""
        post = cls()
        for k, v in kw.iteritems():
            setattr(post, k, v)
        post.neighborhood_id = c.project.neighborhood_id
        post.make_slug()
        post.commit()
        M.Thread.new(discussion_id=post.app_config.discussion_id,
                     ref_id=post.index_id(),
                     subject='%s discussion' % post.title)
        return post

    def __json__(self, posts_limit=None, is_export=False):
        return dict(super(BlogPost, self).__json__(posts_limit=posts_limit,
                                                   is_export=is_export),
                    author=self.author().username,
                    title=self.title,
                    url=h.absurl('/rest' + self.url()),
                    text=self.text,
                    labels=list(self.labels),
                    state=self.state)

    def feed_item(self):
        # The feed entry whose link has no '#fragment' (i.e. the post itself,
        # not a comment) -- see link_regex above.
        return M.Feed.query.get(ref_id=self.index_id(),
                                link=self.link_regex)

    def delete(self):
        feed_item = self.feed_item()
        if feed_item:
            feed_item.delete()
        super(BlogPost, self).delete()
class Post(Message, VersionedArtifact, ActivityObject):
    """A single post in a discussion thread, with moderation workflow
    ('pending' -> 'ok'/'spam') and notification side effects."""

    class __mongometa__:
        name = 'post'
        history_class = PostHistory
        indexes = [
            # used in general lookups, last_post, etc
            ('discussion_id', 'status', 'timestamp'),
            'thread_id'
        ]

    type_s = 'Post'

    thread_id = ForeignIdProperty(Thread)
    discussion_id = ForeignIdProperty(Discussion)
    subject = FieldProperty(schema.Deprecated)
    status = FieldProperty(
        schema.OneOf('ok', 'pending', 'spam', if_missing='pending'))
    flagged_by = FieldProperty([schema.ObjectId])
    flags = FieldProperty(int, if_missing=0)
    last_edit_date = FieldProperty(datetime, if_missing=None)
    last_edit_by_id = ForeignIdProperty(User)
    edit_count = FieldProperty(int, if_missing=0)
    spam_check_id = FieldProperty(str, if_missing='')
    text_cache = FieldProperty(MarkdownCache)

    thread = RelationProperty(Thread)
    discussion = RelationProperty(Discussion)

    def __json__(self):
        author = self.author()
        return dict(_id=str(self._id),
                    thread_id=self.thread_id,
                    slug=self.slug,
                    subject=self.subject,
                    status=self.status,
                    text=self.text,
                    flagged_by=map(str, self.flagged_by),
                    timestamp=self.timestamp,
                    author_id=str(author._id),
                    author=author.username)

    @property
    def activity_name(self):
        return 'a comment'

    def has_activity_access(self, perm, user):
        """Return True if user has perm access to this object, otherwise
        return False.

        For the purposes of activitystreams, we're saying that the user does
        not have access to a 'comment' activity unless he also has access to
        the artifact on which it was posted (if there is one).
        """
        artifact_access = True
        if self.thread.artifact:
            artifact_access = security.has_access(self.thread.artifact,
                                                  perm, user,
                                                  self.thread.artifact.project)
        return artifact_access and security.has_access(self, perm, user,
                                                       self.project)

    def index(self):
        result = super(Post, self).index()
        result.update(title='Post by %s on %s' % (
                          self.author().username, self.subject),
                      name_s=self.subject,
                      type_s='Post',
                      text=self.text)
        return result

    @classmethod
    def discussion_class(cls):
        return cls.discussion.related

    @classmethod
    def thread_class(cls):
        return cls.thread.related

    @classmethod
    def attachment_class(cls):
        return DiscussionAttachment

    @property
    def parent(self):
        return self.query.get(_id=self.parent_id)

    @property
    def subject(self):
        # Stored `subject` field is deprecated; derive from the thread, then
        # the thread's artifact, finally a placeholder.
        subject = self.thread.subject
        if not subject:
            artifact = self.thread.artifact
            if artifact:
                subject = getattr(artifact, 'email_subject', '')
        return subject or '(no subject)'

    @LazyProperty
    def attachments(self):
        return self.attachment_class().query.find(
            dict(post_id=self._id, type='attachment')).all()

    def add_multiple_attachments(self, file_info):
        if isinstance(file_info, list):
            map(self.add_attachment, file_info)
        else:
            self.add_attachment(file_info)

    def add_attachment(self, file_info):
        if hasattr(file_info, 'file'):
            mime_type = file_info.type
            if not mime_type or '/' not in mime_type:
                # Browser did not supply a usable content type; guess from
                # the filename.
                mime_type = utils.guess_mime_type(file_info.filename)
            self.attach(file_info.filename,
                        file_info.file,
                        content_type=mime_type,
                        post_id=self._id,
                        thread_id=self.thread_id,
                        discussion_id=self.discussion_id)

    def last_edit_by(self):
        return User.query.get(_id=self.last_edit_by_id) or User.anonymous()

    def primary(self):
        return self.thread.primary()

    def url(self):
        if self.thread:
            return self.thread.url() + h.urlquote(self.slug) + '/'
        else:  # pragma no cover
            return None

    def url_paginated(self):
        '''Return link to the thread with a #target that poins to this
        comment.

        Also handle pagination properly.
        '''
        if not self.thread:  # pragma no cover
            return None
        limit, p, s = g.handle_paging(None, 0)  # get paging limit
        if self.query.find(
                dict(thread_id=self.thread._id)).count() <= limit:
            # all posts in a single page
            page = 0
        else:
            posts = self.thread.find_posts()
            posts = self.thread.create_post_threads(posts)

            def find_i(posts):
                '''Find the index number of this post in the display order'''
                q = []

                def traverse(posts):
                    for p in posts:
                        if p['post']._id == self._id:
                            return True  # found
                        q.append(p)
                        if traverse(p['children']):
                            return True
                traverse(posts)
                return len(q)

            # Py2 integer division: index -> zero-based page number.
            page = find_i(posts) / limit

        slug = h.urlquote(self.slug)
        aref = ArtifactReference.query.get(_id=self.thread.ref_id)
        if aref and aref.artifact:
            url = aref.artifact.url()
        else:
            url = self.thread.url()
        if page == 0:
            return '%s?limit=%s#%s' % (url, limit, slug)
        return '%s?limit=%s&page=%s#%s' % (url, limit, page, slug)

    def shorthand_id(self):
        if self.thread:
            return '%s#%s' % (self.thread.shorthand_id(), self.slug)
        else:  # pragma no cover
            return None

    def link_text(self):
        return self.subject

    def reply_subject(self):
        if self.subject and self.subject.lower().startswith('re:'):
            return self.subject
        else:
            return 'Re: ' + (self.subject or '(no subject)')

    def delete(self):
        self.attachment_class().remove(dict(post_id=self._id))
        super(Post, self).delete()
        self.thread.num_replies = max(0, self.thread.num_replies - 1)

    def approve(self, file_info=None, notify=True):
        """Mark the post 'ok', grant the author forum rights per the
        PostingPolicy, and emit notifications/activity."""
        if self.status == 'ok':
            return
        self.status = 'ok'
        author = self.author()
        security.simple_grant(
            self.acl, author.project_role(self.project)._id, 'moderate')
        self.commit()
        if (c.app.config.options.get('PostingPolicy') ==
                'ApproveOnceModerated'
                and author._id != None):
            # Once one post is approved, the author may post unmoderated.
            security.simple_grant(
                self.acl, author.project_role()._id, 'unmoderated_post')
        if notify:
            self.notify(file_info=file_info)
        artifact = self.thread.artifact or self.thread
        session(self).flush()
        self.thread.last_post_date = max(
            self.thread.last_post_date,
            self.mod_date)
        self.thread.update_stats()
        if hasattr(artifact, 'update_stats'):
            artifact.update_stats()
        if self.text:
            g.director.create_activity(author, 'posted', self,
                                       target=artifact,
                                       related_nodes=[self.app_config.project])

    def notify(self, file_info=None, check_dup=False):
        if self.project.notifications_disabled:
            return  # notifications disabled for entire project
        artifact = self.thread.artifact or self.thread
        n = Notification.query.get(
            _id=artifact.url() + self._id) if check_dup else None
        if not n:
            n = Notification.post(artifact, 'message', post=self,
                                  file_info=file_info)
        if not n:
            return
        if (hasattr(artifact, "monitoring_email")
                and artifact.monitoring_email):
            if hasattr(artifact, 'notify_post'):
                if artifact.notify_post:
                    n.send_simple(artifact.monitoring_email)
            else:  # Send if no extra checks required
                n.send_simple(artifact.monitoring_email)

    def spam(self):
        self.status = 'spam'
        self.thread.num_replies = max(0, self.thread.num_replies - 1)
        g.spam_checker.submit_spam(self.text, artifact=self, user=c.user)
class Post(Message, VersionedArtifact, ActivityObject):
    """Discussion post (older variant): moderation state plus approval
    flow that fires 'discussion.new_thread'/'discussion.new_post' events."""

    class __mongometa__:
        name = 'post'
        history_class = PostHistory
        indexes = ['discussion_id', 'thread_id']

    type_s = 'Post'

    thread_id = ForeignIdProperty(Thread)
    discussion_id = ForeignIdProperty(Discussion)
    subject = FieldProperty(schema.Deprecated)
    status = FieldProperty(
        schema.OneOf('ok', 'pending', 'spam', if_missing='pending'))
    flagged_by = FieldProperty([schema.ObjectId])
    flags = FieldProperty(int, if_missing=0)
    last_edit_date = FieldProperty(datetime, if_missing=None)
    last_edit_by_id = ForeignIdProperty(User)
    edit_count = FieldProperty(int, if_missing=0)

    thread = RelationProperty(Thread)
    discussion = RelationProperty(Discussion)

    def __json__(self):
        author = self.author()
        return dict(_id=str(self._id),
                    thread_id=self.thread_id,
                    slug=self.slug,
                    subject=self.subject,
                    status=self.status,
                    text=self.text,
                    flagged_by=map(str, self.flagged_by),
                    timestamp=self.timestamp,
                    author_id=str(author._id),
                    author=author.username)

    @property
    def activity_name(self):
        return 'post %s' % self.subject

    def index(self):
        result = super(Post, self).index()
        result.update(title_s='Post by %s on %s' % (
                          self.author().username, self.subject),
                      name_s=self.subject,
                      type_s='Post',
                      text=self.text)
        return result

    @classmethod
    def discussion_class(cls):
        return cls.discussion.related

    @classmethod
    def thread_class(cls):
        return cls.thread.related

    @classmethod
    def attachment_class(cls):
        return DiscussionAttachment

    @property
    def parent(self):
        return self.query.get(_id=self.parent_id)

    @property
    def subject(self):
        # Stored `subject` field is deprecated; derive from the thread, then
        # the thread's artifact, finally a placeholder.
        subject = self.thread.subject
        if not subject:
            artifact = self.thread.artifact
            if artifact:
                subject = getattr(artifact, 'email_subject', '')
        return subject or '(no subject)'

    @property
    def attachments(self):
        # Returns a query cursor (not a list).
        return self.attachment_class().query.find(
            dict(post_id=self._id, type='attachment'))

    def last_edit_by(self):
        return User.query.get(_id=self.last_edit_by_id) or User.anonymous()

    def primary(self):
        return self.thread.primary()

    def summary(self):
        # One-line HTML summary: author link + relative timestamp.
        return '<a href="%s">%s</a> %s' % (
            self.author().url(),
            self.author().get_pref('display_name'),
            h.ago(self.timestamp))

    def url(self):
        if self.thread:
            return self.thread.url() + h.urlquote(self.slug) + '/'
        else:  # pragma no cover
            return None

    def shorthand_id(self):
        if self.thread:
            return '%s#%s' % (self.thread.shorthand_id(), self.slug)
        else:  # pragma no cover
            return None

    def link_text(self):
        return self.subject

    def reply_subject(self):
        if self.subject and self.subject.lower().startswith('re:'):
            return self.subject
        else:
            return 'Re: ' + (self.subject or '(no subject)')

    def delete(self):
        self.attachment_class().remove(dict(post_id=self._id))
        super(Post, self).delete()
        self.thread.num_replies = max(0, self.thread.num_replies - 1)

    def approve(self, file_info=None):
        """Mark the post 'ok', fire new-thread/new-post events, grant the
        author unmoderated posting per PostingPolicy, and notify."""
        from allura.model.notification import Notification
        if self.status == 'ok':
            return
        self.status = 'ok'
        if self.parent_id is None:
            # First post of a thread: the thread itself is now live.
            thd = self.thread_class().query.get(_id=self.thread_id)
            g.post_event('discussion.new_thread', thd._id)
        author = self.author()
        security.simple_grant(
            self.acl, author.project_role()._id, 'moderate')
        self.commit()
        if (c.app.config.options.get('PostingPolicy') ==
                'ApproveOnceModerated'
                and author._id != None):
            security.simple_grant(
                self.acl, author.project_role()._id, 'unmoderated_post')
        g.post_event('discussion.new_post', self.thread_id, self._id)
        artifact = self.thread.artifact or self.thread
        n = Notification.post(artifact, 'message', post=self,
                              file_info=file_info)
        if hasattr(self.discussion, "monitoring_email") \
                and self.discussion.monitoring_email:
            n.send_simple(self.discussion.monitoring_email)
        session(self).flush()
        self.thread.last_post_date = max(
            self.thread.last_post_date,
            self.mod_date)
        self.thread.update_stats()
        self.discussion.update_stats()

    def spam(self):
        self.status = 'spam'
        self.thread.num_replies = max(0, self.thread.num_replies - 1)
        g.post_event('spam', self.index_id())
class MappedEntity(MappedClass):
    """Common base for workspace-scoped entities.

    Holds ownership/workspace links, visibility flags, a per-user
    read/unread status, and generic query helpers shared by the concrete
    entity classes that subclass it.
    """

    # Closed set of values stored in ``status``.
    STATUS = Bunch(READ="READ", UNREAD="UNREAD", INCOMPLETE="INCOMPLETE")

    _id = FieldProperty(s.ObjectId)
    _owner = ForeignIdProperty('User')
    owner = RelationProperty('User')
    _workspace = ForeignIdProperty('Workspace')
    workspace = RelationProperty('Workspace')
    hash = FieldProperty(s.String)
    title = FieldProperty(s.String, required=True)
    public = FieldProperty(s.Bool, if_missing=True)
    visible = FieldProperty(s.Bool, if_missing=True)
    status = FieldProperty(s.OneOf(*STATUS.values()),
                           required=True,
                           if_missing=STATUS.UNREAD)
    auto_generated = FieldProperty(s.Bool, if_missing=False)

    @property
    def created_at(self):
        # ObjectIds embed their creation time; no separate field is stored.
        return self._id.generation_time

    @classmethod
    def unread_count(cls, workspace_id):
        """Count of UNREAD entities in a workspace, or '' when zero.

        The falsy-count-to-'' coercion is deliberate — presumably so the
        caller can render an empty badge; confirm before changing.
        """
        return cls.query.find({
            'status': cls.STATUS.UNREAD,
            '_workspace': ObjectId(workspace_id)
        }).count() or ''

    @property
    def dependencies(self):
        # Overridden by subclasses with real dependency graphs.
        return []

    @property
    def descendants(self):
        # Overridden by subclasses with real child collections.
        return []

    @property
    def entity(self):
        # Short entity-type name; subclasses return e.g. 'qa', 'output'.
        return ''

    @property
    def url(self):
        """Edit-page URL for this entity within its workspace."""
        return lurl('/%s/edit/' % self.entity,
                    params=dict(workspace=self.workspace._id, _id=self._id))

    @classmethod
    def by_id(cls, _id):
        """Fetch an entity by id; accepts an ObjectId or its string form.

        BUG FIX: ming's ``query.get`` takes keyword arguments only — the
        previous positional call ``query.get(ObjectId(_id))`` raised
        TypeError instead of performing the lookup.
        """
        return cls.query.get(_id=ObjectId(_id))

    @classmethod
    def upsert(cls, find, replace):
        # find_and_modify or other methods does not work for ming mapper
        # extensions so this is an artisan upsert implementation with
        # instant flushes out of uow
        found = cls.query.get(**find)
        if not found:
            o = cls(**replace)
            DBSession.flush(o)
            return o
        for k, v in replace.items():
            found[k] = v
        DBSession.flush(found)
        return found

    @classmethod
    def by_hash(cls, _hash):
        return cls.query.get(hash=_hash)

    @classmethod
    def mark_as_read(cls, user_oid, workspace_id):
        """Flip every UNREAD entity owned by the user in the workspace to
        READ, directly at the pymongo collection level (bypasses the ODM
        unit of work on purpose — single bulk write, no per-object flush).
        """
        from ming.odm import mapper
        collection = mapper(cls).collection.m.collection
        collection.update_many(
            {
                '_owner': user_oid,
                'status': cls.STATUS.UNREAD,
                '_workspace': ObjectId(workspace_id)
            },
            update={'$set': {'status': cls.STATUS.READ}})

    @classmethod
    def available_for_user(cls, user_id, workspace=None):
        """Entities of this type the user owns, sorted for display:
        hand-made before auto-generated, then by status and title."""
        from ksweb.model import User
        return User.query.get(_id=user_id).owned_entities(
            cls, workspace).sort([
                ('auto_generated', pymongo.ASCENDING),
                ('status', pymongo.DESCENDING),
                ('title', pymongo.ASCENDING),
            ])

    def update_dependencies(self, old):
        # Default is a no-op with a reminder; subclasses react to hash
        # changes here.
        log = logging.getLogger(__name__)
        log.info(
            "Please implement this method in models if some action is needed for %s => %s"
            % (old, self.hash))

    def dependent_filters(self):
        """Preconditions that reference this entity, directly or through a
        simple precondition used inside an advanced one."""
        from ksweb.model import Precondition
        simple = Precondition.query.find(dict(condition=self._id)).all()
        simple_id = [_._id for _ in simple]
        advanced = Precondition.query.find(
            dict(workspace=self._workspace,
                 condition={'$in': simple_id})).all()
        return simple + advanced

    def dependent_outputs(self):
        """Outputs whose html embeds this entity's hash."""
        from ksweb.model import Output
        outputs = Output.query.find({'html': self.hash}).all()
        return outputs

    def __json__(self):
        _dict = dictify(self)
        _dict['entity'] = self.entity
        return _dict

    def exportable_dict(self):
        """JSON dict stripped of workspace/owner/bookkeeping fields, for
        import into another workspace."""
        filter_out = [
            '_workspace', '_owner', 'created_at', 'auto_generated',
            'status', '_id'
        ]
        return {
            k: v
            for k, v in self.__json__().items() if k not in filter_out
        }
class Qa(MappedClass):
    """A question definition ("qa" entity).

    A question is free-text, single-choice or multi-choice (see QA_TYPE),
    may belong to a category, and may be guarded by a parent precondition.
    """

    # Supported question kinds, in the order the is_* properties test them.
    QA_TYPE = [u"text", u"single", u"multi"]

    class __mongometa__:
        session = DBSession
        name = 'qas'
        indexes = [
            ('title',),
            ('_owner',),
            ('_category',),
            ('type', 'public',),
        ]

    __ROW_COLUM_CONVERTERS__ = {'title': _custom_title}

    _id = FieldProperty(s.ObjectId)

    _owner = ForeignIdProperty('User')
    owner = RelationProperty('User')

    _category = ForeignIdProperty('Category')
    category = RelationProperty('Category')

    _parent_precondition = ForeignIdProperty('Precondition')
    parent_precondition = RelationProperty('Precondition')

    title = FieldProperty(s.String, required=True)
    question = FieldProperty(s.String, required=True)
    tooltip = FieldProperty(s.String, required=False)
    link = FieldProperty(s.String, required=False)
    type = FieldProperty(s.OneOf(*QA_TYPE), required=True)
    answers = FieldProperty(s.Anything)
    public = FieldProperty(s.Bool, if_missing=True)
    visible = FieldProperty(s.Bool, if_missing=True)

    @classmethod
    def qa_available_for_user(cls, user_id, workspace=None):
        """Questions the given user owns, optionally limited to a workspace."""
        owner = User.query.get(_id=user_id)
        return owner.owned_entities(cls, workspace)

    @property
    def entity(self):
        return 'qa'

    @property
    def is_text(self):
        """True for a free-text question."""
        return self.type == self.QA_TYPE[0]

    @property
    def is_single(self):
        """True for a single-choice question."""
        return self.type == self.QA_TYPE[1]

    @property
    def is_multi(self):
        """True for a multiple-choice question."""
        return self.type == self.QA_TYPE[2]

    def __json__(self):
        from ksweb.lib.utils import to_dict
        serialized = to_dict(self)
        serialized['entity'] = self.entity
        return serialized
class Post(Message, VersionedArtifact, ActivityObject):
    """A single comment in a discussion :class:`Thread`.

    Carries a moderation ``status`` ('pending' until approved), keeps edit
    history via ``PostHistory``, and participates in the activity stream.
    """

    class __mongometa__:
        name = 'post'
        history_class = PostHistory
        indexes = [
            # used in general lookups, last_post, etc
            ('discussion_id', 'status', 'timestamp'),
            'thread_id'
        ]
    type_s = 'Post'

    thread_id = ForeignIdProperty(Thread)
    discussion_id = ForeignIdProperty(Discussion)
    # Legacy stored field; superseded by the `subject` property below.
    subject = FieldProperty(schema.Deprecated)
    # Moderation state; new posts start out unmoderated ('pending').
    status = FieldProperty(schema.OneOf('ok', 'pending', 'spam',
                                        if_missing='pending'))
    last_edit_date = FieldProperty(datetime, if_missing=None)
    last_edit_by_id = AlluraUserProperty()
    edit_count = FieldProperty(int, if_missing=0)
    spam_check_id = FieldProperty(str, if_missing='')
    text_cache = FieldProperty(MarkdownCache)
    # meta comment - system generated, describes changes to an artifact
    is_meta = FieldProperty(bool, if_missing=False)

    thread = RelationProperty(Thread)
    discussion = RelationProperty(Discussion)

    def __json__(self):
        """JSON-serializable dict representation of this post."""
        author = self.author()
        return dict(
            _id=str(self._id),
            thread_id=self.thread_id,
            slug=self.slug,
            subject=self.subject,
            status=self.status,
            text=self.text,
            timestamp=self.timestamp,
            last_edited=self.last_edit_date,
            author_id=str(author._id),
            author=author.username)

    @property
    def activity_name(self):
        return 'a comment'

    @property
    def activity_url(self):
        return self.url_paginated()

    def has_activity_access(self, perm, user, activity):
        """Return True if user has perm access to this object, otherwise
        return False.

        For the purposes of activitystreams, we're saying that the user does
        not have access to a 'comment' activity unless he also has access to
        the artifact on which it was posted (if there is one).
        """
        if self.project is None or self.deleted or self.status != 'ok':
            return False
        artifact_access = True
        if self.thread.artifact:
            if self.thread.artifact.project is None:
                return False
            if self.thread.artifact.deleted:
                return False
            artifact_access = security.has_access(
                self.thread.artifact, perm, user,
                self.thread.artifact.project)
        return artifact_access and security.has_access(
            self, perm, user, self.project)

    @property
    def activity_extras(self):
        d = ActivityObject.activity_extras.fget(self)
        # For activity summary, convert Post text to html,
        # strip all tags, and truncate
        LEN = 500
        summary = jinja2.Markup.escape(
            g.markdown.cached_convert(self, 'text')).striptags()
        if len(summary) > LEN:
            # Break on a word boundary at or after LEN, if there is one.
            split = max(summary.find(' ', LEN), LEN)
            summary = summary[:split] + '...'
        d.update(summary=summary)
        return d

    def index(self):
        """Solr index document for this post."""
        result = super(Post, self).index()
        result.update(
            title='Post by %s on %s' % (
                self.author().username, self.subject),
            name_s=self.subject,
            type_s='Post',
            text=self.text)
        return result

    @classmethod
    def discussion_class(cls):
        return cls.discussion.related

    @classmethod
    def thread_class(cls):
        return cls.thread.related

    @classmethod
    def attachment_class(cls):
        return DiscussionAttachment

    @property
    def parent(self):
        # Parent post within the thread, if this is a reply.
        if self.parent_id:
            return self.query.get(_id=self.parent_id)

    @property
    def subject(self):
        """Effective subject: thread subject, else the artifact's email
        subject, else a placeholder."""
        subject = None
        if self.thread:
            subject = self.thread.subject
            if not subject:
                artifact = self.thread.artifact
                if artifact:
                    subject = getattr(artifact, 'email_subject', '')
        return subject or '(no subject)'

    @LazyProperty
    def attachments(self):
        atts = self.attachment_class().query.find(dict(
            post_id=self._id, type='attachment')).all()
        return utils.unique_attachments(atts)

    def add_multiple_attachments(self, file_info):
        """Attach one file or a list of files to this post."""
        if isinstance(file_info, list):
            # FIX: explicit loop instead of map(); map() is lazy under
            # Python 3, which would silently skip the attachments.
            for fi in file_info:
                self.add_attachment(fi)
        else:
            self.add_attachment(file_info)

    def add_attachment(self, file_info):
        """Attach an uploaded file (ignores inputs with no ``file`` attr)."""
        if hasattr(file_info, 'file'):
            mime_type = file_info.type
            # Guess the MIME type if the browser-supplied one is missing
            # or malformed.
            if not mime_type or '/' not in mime_type:
                mime_type = utils.guess_mime_type(file_info.filename)
            self.attach(
                file_info.filename, file_info.file, content_type=mime_type,
                post_id=self._id,
                thread_id=self.thread_id,
                discussion_id=self.discussion_id)

    def last_edit_by(self):
        return User.query.get(_id=self.last_edit_by_id) or User.anonymous()

    def primary(self):
        return self.thread.primary()

    def url(self):
        if self.thread:
            return self.thread.url() + h.urlquote(self.slug) + '/'
        else:  # pragma no cover
            return None

    def parent_artifact(self):
        """
        :return: the artifact (e.g Ticket, Wiki Page) that this Post belongs to.  May return None.
        """
        aref = ArtifactReference.query.get(_id=self.thread.ref_id)
        if aref and aref.artifact:
            return aref.artifact
        else:
            return None

    def main_url(self):
        """
        :return: the URL for the artifact (e.g Ticket, Wiki Page) that this Post belongs to,
                 else the default thread URL
        """
        parent_artifact = self.parent_artifact()
        if parent_artifact:
            url = parent_artifact.url()
        else:
            url = self.thread.url()
        return url

    def url_paginated(self):
        '''Return link to the thread with a #target that poins to this comment.

        Also handle pagination properly.
        '''
        if not self.thread:  # pragma no cover
            return None
        limit, p, s = g.handle_paging(None, 0)  # get paging limit
        if self.query.find(dict(thread_id=self.thread._id)).count() <= limit:
            # all posts in a single page
            page = 0
        else:
            posts = self.thread.find_posts()
            posts = self.thread.create_post_threads(posts)

            def find_i(posts):
                '''Find the index number of this post in the display order'''
                q = []

                def traverse(posts):
                    for p in posts:
                        if p['post']._id == self._id:
                            return True  # found
                        q.append(p)
                        if traverse(p['children']):
                            return True
                traverse(posts)
                return len(q)

            # FIX: explicit floor division — identical to `/` on ints in
            # Python 2, and keeps the page number an int under Python 3.
            page = find_i(posts) // limit

        slug = h.urlquote(self.slug)
        url = self.main_url()
        if page == 0:
            return '%s?limit=%s#%s' % (url, limit, slug)
        return '%s?limit=%s&page=%s#%s' % (url, limit, page, slug)

    def shorthand_id(self):
        if self.thread:
            return '%s#%s' % (self.thread.shorthand_id(), self.slug)
        else:  # pragma no cover
            return None

    def link_text(self):
        return self.subject

    def reply_subject(self):
        if self.subject and self.subject.lower().startswith('re:'):
            return self.subject
        else:
            return 'Re: ' + (self.subject or '(no subject)')

    def delete(self):
        # Soft delete: mark and refresh the thread's cached stats.
        self.deleted = True
        session(self).flush(self)
        self.thread.update_stats()

    def approve(self, file_info=None, notify=True, notification_text=None):
        """Mark a pending post as 'ok', grant the author moderation-related
        permissions as configured, send notifications, and refresh stats."""
        if self.status == 'ok':
            return
        self.status = 'ok'
        author = self.author()
        author_role = ProjectRole.by_user(
            author, project=self.project, upsert=True)
        if not author.is_anonymous():
            security.simple_grant(
                self.acl, author_role._id, 'moderate')
        self.commit()
        # FIX: identity comparison with None (PEP 8) instead of `!= None`.
        if (c.app.config.options.get('PostingPolicy') == 'ApproveOnceModerated'
                and author._id is not None):
            # Once a user has one approved post, let them post unmoderated.
            security.simple_grant(
                self.acl, author_role._id, 'unmoderated_post')
        if notify:
            self.notify(file_info=file_info,
                        notification_text=notification_text)
        artifact = self.thread.artifact or self.thread
        session(self).flush()
        self.thread.last_post_date = max(
            self.thread.last_post_date,
            self.mod_date)
        self.thread.update_stats()
        if hasattr(artifact, 'update_stats'):
            artifact.update_stats()
        if self.text and not self.is_meta:
            g.director.create_activity(author, 'posted', self,
                                       target=artifact,
                                       related_nodes=[self.app_config.project],
                                       tags=['comment'])

    def notify(self, file_info=None, notification_text=None):
        """Send (or re-send) the notification for this post, honoring
        moderation state and per-artifact monitoring emails."""
        if self.project.notifications_disabled:
            return  # notifications disabled for entire project
        artifact = self.thread.artifact or self.thread
        msg_id = artifact.url() + self._id
        notification_params = dict(
            post=self,
            text=notification_text,
            file_info=file_info)
        n = Notification.query.get(_id=msg_id)
        if n and 'Moderation action required' in n.subject:
            # Existing notification for this artifact is for moderators only,
            # this means artifact was not auto approved, and all the
            # subscribers did not receive notification. Now, moderator approved
            # artifact/post, so we should re-send actual notification
            msg_id = u'approved-' + msg_id
            n = Notification.query.get(_id=msg_id)
            if n:
                # 'approved' notification also exists, re-send
                n.fire_notification_task([artifact, self.thread], 'message')
            else:
                # 'approved' notification does not exist, create
                notification_params['message_id'] = msg_id
        if not n:
            # artifact is Forum (or artifact like WikiPage)
            n = Notification.post(
                artifact, 'message',
                additional_artifacts_to_match_subscriptions=self.thread,
                **notification_params)
        if not n:
            return
        if getattr(artifact, 'monitoring_email', None):
            if hasattr(artifact, 'notify_post'):
                if artifact.notify_post:
                    n.send_simple(artifact.monitoring_email)
            else:  # Send if no extra checks required
                n.send_simple(artifact.monitoring_email)

    def spam(self, submit_spam_feedback=True):
        """Mark this post as spam, optionally reporting it to the spam
        checker for training."""
        self.status = 'spam'
        if submit_spam_feedback:
            g.spam_checker.submit_spam(
                self.text, artifact=self, user=self.author())
        session(self).flush(self)
        self.thread.update_stats()

    def undo(self, prev_status):
        """Revert a spam/moderation action back to ``prev_status``
        (only 'ok' and 'pending' are valid targets)."""
        if prev_status in ('ok', 'pending'):
            self.status = prev_status
            session(self).flush(self)
            self.thread.update_stats()
class Mailbox(MappedClass):
    '''
    Holds a queue of notifications for an artifact, or a user (webflash
    messages) for a subscriber.
    FIXME: describe the Mailbox concept better.
    '''

    class __mongometa__:
        session = main_orm_session
        name = 'mailbox'
        unique_indexes = [
            ('user_id', 'project_id', 'app_config_id',
             'artifact_index_id', 'topic', 'is_flash'),
        ]
        indexes = [
            ('project_id', 'artifact_index_id'),
            ('is_flash', 'user_id'),
            ('type', 'next_scheduled')]

    _id = FieldProperty(S.ObjectId)
    # Defaults pull from the thread-local request context when not given.
    user_id = ForeignIdProperty('User', if_missing=lambda: c.user._id)
    project_id = ForeignIdProperty(
        'Project', if_missing=lambda: c.project._id)
    app_config_id = ForeignIdProperty(
        'AppConfig', if_missing=lambda: c.app.config._id)

    # Subscription filters
    artifact_title = FieldProperty(str)
    artifact_url = FieldProperty(str)
    artifact_index_id = FieldProperty(str)
    topic = FieldProperty(str)

    # Subscription type
    is_flash = FieldProperty(bool, if_missing=False)
    type = FieldProperty(S.OneOf('direct', 'digest', 'summary', 'flash'))
    # For digest/summary types: deliver every `n` day/week/month units.
    frequency = FieldProperty(dict(
        n=int,
        unit=S.OneOf('day', 'week', 'month')))
    next_scheduled = FieldProperty(datetime, if_missing=datetime.utcnow)
    last_modified = FieldProperty(datetime, if_missing=datetime(2000, 1, 1))

    # a list of notification _id values
    queue = FieldProperty([str])

    project = RelationProperty('Project')
    app_config = RelationProperty('AppConfig')

    @classmethod
    def subscribe(cls, user_id=None, project_id=None, app_config_id=None,
                  artifact=None, topic=None,
                  type='direct', n=1, unit='day'):
        """Create or update a subscription mailbox for a user.

        A tool-level subscription (artifact=None) supersedes and removes
        any per-artifact subscriptions for the same tool.
        """
        if user_id is None:
            user_id = c.user._id
        if project_id is None:
            project_id = c.project._id
        if app_config_id is None:
            app_config_id = c.app.config._id
        # A tool-wide subscription already covers every artifact in the tool.
        tool_already_subscribed = cls.query.get(user_id=user_id,
                                                project_id=project_id,
                                                app_config_id=app_config_id,
                                                artifact_index_id=None)
        if tool_already_subscribed:
            log.debug(
                'Tried to subscribe to artifact %s, while there is a tool subscription',
                artifact)
            return
        if artifact is None:
            artifact_title = 'All artifacts'
            artifact_url = None
            artifact_index_id = None
        else:
            i = artifact.index()
            artifact_title = i['title_s']
            artifact_url = artifact.url()
            artifact_index_id = i['id']
            artifact_already_subscribed = cls.query.get(
                user_id=user_id,
                project_id=project_id,
                app_config_id=app_config_id,
                artifact_index_id=artifact_index_id)
            if artifact_already_subscribed:
                return
        d = dict(user_id=user_id, project_id=project_id,
                 app_config_id=app_config_id,
                 artifact_index_id=artifact_index_id, topic=topic)
        sess = session(cls)
        try:
            mbox = cls(type=type, frequency=dict(n=n, unit=unit),
                       artifact_title=artifact_title,
                       artifact_url=artifact_url,
                       **d)
            sess.flush(mbox)
        except pymongo.errors.DuplicateKeyError:
            # Raced with a concurrent subscribe; update the existing mailbox.
            sess.expunge(mbox)
            mbox = cls.query.get(**d)
            mbox.artifact_title = artifact_title
            mbox.artifact_url = artifact_url
            mbox.type = type
            mbox.frequency.n = n
            mbox.frequency.unit = unit
            sess.flush(mbox)
        if not artifact_index_id:
            # Unsubscribe from individual artifacts when subscribing to the tool
            for other_mbox in cls.query.find(dict(
                    user_id=user_id, project_id=project_id,
                    app_config_id=app_config_id)):
                if other_mbox is not mbox:
                    other_mbox.delete()

    @classmethod
    def unsubscribe(cls, user_id=None, project_id=None, app_config_id=None,
                    artifact_index_id=None, topic=None):
        """Remove the matching subscription mailbox, if any."""
        if user_id is None:
            user_id = c.user._id
        if project_id is None:
            project_id = c.project._id
        if app_config_id is None:
            app_config_id = c.app.config._id
        cls.query.remove(dict(
            user_id=user_id,
            project_id=project_id,
            app_config_id=app_config_id,
            artifact_index_id=artifact_index_id,
            topic=topic))

    @classmethod
    def subscribed(cls, user_id=None, project_id=None, app_config_id=None,
                   artifact=None, topic=None):
        """Return True if a matching subscription mailbox exists."""
        if user_id is None:
            user_id = c.user._id
        if project_id is None:
            project_id = c.project._id
        if app_config_id is None:
            app_config_id = c.app.config._id
        if artifact is None:
            artifact_index_id = None
        else:
            i = artifact.index()
            artifact_index_id = i['id']
        return cls.query.find(dict(
            user_id=user_id,
            project_id=project_id,
            app_config_id=app_config_id,
            artifact_index_id=artifact_index_id)).count() != 0

    @classmethod
    def deliver(cls, nid, artifact_index_id, topic):
        '''Called in the notification message handler to deliver notification
        IDs to the appropriate mailboxes.  Atomically appends the nids to the
        appropriate mailboxes.
        '''
        d = {
            'project_id': c.project._id,
            'app_config_id': c.app.config._id,
            # None matches tool-wide subscriptions; the concrete id matches
            # per-artifact ones.
            'artifact_index_id': {'$in': [None, artifact_index_id]},
            'topic': {'$in': [None, topic]}
        }
        for mbox in cls.query.find(d):
            mbox.query.update(
                {'$push': dict(queue=nid),
                 '$set': dict(last_modified=datetime.utcnow())})
            # Make sure the mbox doesn't stick around to be flush()ed
            session(mbox).expunge(mbox)

    @classmethod
    def fire_ready(cls):
        '''Fires all direct subscriptions with notifications as well as
        all summary & digest subscriptions with notifications that are ready.
        Clears the mailbox queue.
        '''
        now = datetime.utcnow()
        # Queries to find all matching subscription objects
        q_direct = dict(
            type='direct',
            queue={'$ne': []})
        if MAILBOX_QUIESCENT:
            # Hold direct mail briefly so rapid-fire edits coalesce.
            q_direct['last_modified'] = {'$lt': now - MAILBOX_QUIESCENT}
        q_digest = dict(
            type={'$in': ['digest', 'summary']},
            next_scheduled={'$lt': now})
        for mbox in cls.query.find(q_direct):
            # Atomically claim the queue so a concurrent worker can't
            # double-send.
            mbox = cls.query.find_and_modify(
                query=dict(_id=mbox._id),
                update={'$set': dict(queue=[])},
                new=False)
            mbox.fire(now)
        for mbox in cls.query.find(q_digest):
            next_scheduled = now
            if mbox.frequency.unit == 'day':
                next_scheduled += timedelta(days=mbox.frequency.n)
            elif mbox.frequency.unit == 'week':
                next_scheduled += timedelta(days=7 * mbox.frequency.n)
            elif mbox.frequency.unit == 'month':
                next_scheduled += timedelta(days=30 * mbox.frequency.n)
            mbox = cls.query.find_and_modify(
                query=dict(_id=mbox._id),
                update={'$set': dict(
                    next_scheduled=next_scheduled,
                    queue=[])},
                new=False)
            mbox.fire(now)

    def fire(self, now):
        '''
        Send all notifications that this mailbox has enqueued.
        '''
        notifications = Notification.query.find(
            dict(_id={'$in': self.queue}))
        notifications = notifications.all()
        if self.type == 'direct':
            ngroups = defaultdict(list)
            for n in notifications:
                if n.topic == 'message':
                    # Messages must be sent individually so they can be
                    # replied to individually
                    n.send_direct(self.user_id)
                else:
                    key = (n.subject,
                           n.from_address,
                           n.reply_to_address,
                           n.author_id)
                    ngroups[key].append(n)
            # Accumulate messages from same address with same subject
            for ((subject, from_address, reply_to_address, author_id),
                 ns) in ngroups.iteritems():
                if len(ns) == 1:
                    # BUG FIX: send the group's single notification. The
                    # previous code called `n.send_direct(...)`, where `n`
                    # was the stale loop variable left over from the
                    # grouping pass above — the wrong notification was sent.
                    ns[0].send_direct(self.user_id)
                else:
                    Notification.send_digest(
                        self.user_id, from_address, subject, ns,
                        reply_to_address)
        elif self.type == 'digest':
            Notification.send_digest(
                self.user_id, u'*****@*****.**', 'Digest Email',
                notifications)
        elif self.type == 'summary':
            Notification.send_summary(
                self.user_id, u'*****@*****.**', 'Digest Email',
                notifications)
class User(MappedClass, ActivityNode, ActivityObject, SearchIndexable):
    """An Allura user account: credentials, preferences, profile data,
    and helpers for auth, notifications, and the user's personal project.
    """

    SALT_LEN = 8

    class __mongometa__:
        name = str('user')
        session = main_orm_session
        indexes = ['tool_data.sfx.userid',
                   'tool_data.AuthPasswordReset.hash']
        unique_indexes = ['username']
        custom_indexes = [
            dict(fields=('tool_data.phone_verification.number_hash',),
                 sparse=True),
        ]
    type_s = 'User'

    _id = FieldProperty(S.ObjectId)
    sfx_userid = FieldProperty(S.Deprecated)
    username = FieldProperty(str)
    email_addresses = FieldProperty([str])
    password = FieldProperty(str)
    last_password_updated = FieldProperty(datetime)
    projects = FieldProperty(S.Deprecated)
    # full mount point: prefs dict
    tool_preferences = FieldProperty(S.Deprecated)
    tool_data = FieldProperty({str: {str: None}})  # entry point: prefs dict
    disabled = FieldProperty(bool, if_missing=False)
    pending = FieldProperty(bool, if_missing=False)

    # Don't use these directly, use get/set_pref() instead
    preferences = FieldProperty(dict(
        results_per_page=int,
        email_address=str,
        email_format=str,
        disable_user_messages=bool,
        mention_notifications=bool,
        multifactor=bool,
    ))
    # Additional top-level fields can/should be accessed with get/set_pref also
    # Not sure why we didn't put them within the 'preferences' dictionary :(
    display_name = FieldPropertyDisplayName(str)
    # Personal data
    sex = FieldProperty(
        S.OneOf('Male', 'Female', 'Other', 'Unknown',
                if_missing='Unknown'))
    birthdate = FieldProperty(S.DateTime, if_missing=None)

    # Availability information
    availability = FieldProperty([dict(
        week_day=str,
        start_time=dict(h=int, m=int),
        end_time=dict(h=int, m=int))])
    localization = FieldProperty(dict(city=str, country=str))
    timezone = FieldProperty(str)
    # Timestamps of recently-sent user messages, for rate limiting.
    sent_user_message_times = FieldProperty([S.DateTime])
    inactiveperiod = FieldProperty([dict(
        start_date=S.DateTime,
        end_date=S.DateTime)])

    # Additional contacts
    socialnetworks = FieldProperty([dict(socialnetwork=str, accounturl=str)])
    telnumbers = FieldProperty([str])
    skypeaccount = FieldProperty(str)
    webpages = FieldProperty([str])

    # Skills list
    skills = FieldProperty([dict(
        category_id=S.ObjectId,
        level=S.OneOf('low', 'high', 'medium'),
        comment=str)])

    # Statistics
    stats_id = FieldProperty(S.ObjectId, if_missing=None)
    last_access = FieldProperty(dict(
        login_date=S.DateTime,
        login_ip=str,
        login_ua=str,
        session_date=S.DateTime,
        session_ip=str,
        session_ua=str))

    def __repr__(self):
        return ('<User username={s.username!r} display_name={s.display_name!r} _id={s._id!r} '
                'disabled={s.disabled!r} pending={s.pending!r}>'.format(s=self))

    def index(self):
        """Solr index document for this user."""
        provider = plugin.AuthenticationProvider.get(None)  # no need in request here
        localization = '%s/%s' % (
            self.get_pref('localization')['country'],
            self.get_pref('localization')['city'])
        socialnetworks = ' '.join(
            ['%s: %s' % (n['socialnetwork'], n['accounturl'])
             for n in self.get_pref('socialnetworks')])
        fields = dict(
            id=self.index_id(),
            title='User %s' % self.username,
            url_s=self.url(),
            type_s=self.type_s,
            username_s=self.username,
            email_addresses_t=' '.join(
                [e for e in self.email_addresses if e]),
            last_password_updated_dt=self.last_password_updated,
            disabled_b=self.disabled,
            pending_b=self.pending,
            results_per_page_i=self.get_pref('results_per_page'),
            email_address_s=self.get_pref('email_address'),
            email_format_s=self.get_pref('email_format'),
            disable_user_messages_b=self.get_pref('disable_user_messages'),
            display_name_t=self.get_pref('display_name'),
            sex_s=self.get_pref('sex'),
            birthdate_dt=self.get_pref('birthdate'),
            localization_s=localization,
            timezone_s=self.get_pref('timezone'),
            socialnetworks_t=socialnetworks,
            telnumbers_t=' '.join(
                [t for t in self.get_pref('telnumbers') if t]),
            skypeaccount_s=self.get_pref('skypeaccount'),
            webpages_t=' '.join(
                [p for p in self.get_pref('webpages') if p]),
            skills_t=' '.join(
                [s['skill'].fullpath for s in self.get_skills()
                 if s.get('skill')]),
            last_access_login_date_dt=self.last_access['login_date'],
            last_access_login_ip_s=self.last_access['login_ip'],
            last_access_login_ua_t=self.last_access['login_ua'],
            last_access_session_date_dt=self.last_access['session_date'],
            last_access_session_ip_s=self.last_access['session_ip'],
            last_access_session_ua_t=self.last_access['session_ua'],
        )
        # Let the auth provider contribute/override fields.
        return dict(provider.index_user(self), **fields)

    def track_login(self, req):
        """Record the time/IP/user-agent of a successful login."""
        user_ip = utils.ip_address(req)
        user_agent = req.headers.get('User-Agent')
        self.last_access['login_date'] = datetime.utcnow()
        self.last_access['login_ip'] = user_ip
        self.last_access['login_ua'] = user_agent
        session(self).flush(self)

    def track_active(self, req):
        """Record session activity, flushing only when the day, IP, or
        user-agent changed (avoids a DB write per request)."""
        user_ip = utils.ip_address(req)
        user_agent = req.headers.get('User-Agent')
        now = datetime.utcnow()
        last_date = self.last_access['session_date']
        date_changed = last_date is None or last_date.date() != now.date()
        ip_changed = user_ip != self.last_access['session_ip']
        ua_changed = user_agent != self.last_access['session_ua']
        if date_changed or ip_changed or ua_changed:
            self.last_access['session_date'] = datetime.utcnow()
            self.last_access['session_ip'] = user_ip
            self.last_access['session_ua'] = user_agent
            session(self).flush(self)

    def add_login_detail(self, detail):
        # Duplicate login details are expected; just drop them.
        try:
            session(detail).flush(detail)
        except DuplicateKeyError:
            session(detail).expunge(detail)

    def backfill_login_details(self, auth_provider):
        """Reconstruct login-detail records from trusted audit-log lines."""
        # ".*" at start of regex and the DOTALL flag is needed only for the test, which uses mim
        # Fixed in ming f9f69d3c, so once we upgrade to 0.6.1+ we can remove it
        msg_regex = re.compile(
            r'.*^({})'.format('|'.join(
                [re.escape(line_prefix) for line_prefix in
                 auth_provider.trusted_auditlog_line_prefixes])),
            re.MULTILINE | re.DOTALL)
        for auditlog in AuditLog.for_user(self, message=msg_regex):
            if not msg_regex.search(auditlog.message):
                continue
            login_detail = auth_provider.login_details_from_auditlog(auditlog)
            if login_detail:
                self.add_login_detail(login_detail)

    def send_password_reset_email(self, email_address=None,
                                  subject_tmpl='{site_name} Password recovery'):
        """Email a password-recovery link to the user."""
        if email_address is None:
            email_address = self.get_pref('email_address')
        reset_url = self.make_password_reset_url()
        log.info('Sending password recovery link to %s', email_address)
        subject = subject_tmpl.format(site_name=config['site_name'])
        text = g.jinja2_env.get_template(
            'allura:templates/mail/forgot_password.txt').render(dict(
                user=self,
                config=config,
                reset_url=reset_url,
            ))
        allura.tasks.mail_tasks.send_system_mail_to_user(
            email_address, subject, text)

    def make_password_reset_url(self):
        """Generate and persist a one-time password reset hash; return its
        absolute URL."""
        hash = h.nonce(42)
        self.set_tool_data(
            'AuthPasswordReset',
            hash=hash,
            hash_expiry=datetime.utcnow() + timedelta(
                seconds=int(config.get('auth.recovery_hash_expiry_period',
                                       600))))
        reset_url = h.absurl('/auth/forgotten_password/{}'.format(hash))
        return reset_url

    def can_send_user_message(self):
        """Return true if User is permitted to send a mesage to another user.

        Returns False if User has exceeded the user message rate
        limit, in which case another message may not be sent until
        sufficient time has passed to clear the limit.
        """
        now = datetime.utcnow()
        time_interval = timedelta(seconds=g.user_message_time_interval)
        # Prune timestamps that have aged out of the rate-limit window.
        self.sent_user_message_times = [
            t for t in self.sent_user_message_times
            if t + time_interval > now]
        return len(self.sent_user_message_times) < g.user_message_max_messages

    def time_to_next_user_message(self):
        """Return a timedelta of the time remaining before this user can
        send another user message.

        Returns zero (int) if a message can be sent immediately.
        """
        if self.can_send_user_message():
            return 0
        return (self.sent_user_message_times[0] +
                timedelta(seconds=g.user_message_time_interval) -
                datetime.utcnow())

    def send_user_message(self, user, subject, message, cc):
        """Send a user message (email) to ``user``, recording the send time
        for rate limiting."""
        tmpl = g.jinja2_env.get_template(
            'allura:ext/user_profile/templates/message.html')
        tmpl_context = {
            'message_text': message,
            'site_name': config['site_name'],
            'base_url': config['base_url'],
            'user': c.user,
        }
        allura.tasks.mail_tasks.sendsimplemail.post(
            toaddr=user.get_pref('email_address'),
            fromaddr=self.get_pref('email_address'),
            reply_to=self.get_pref('email_address'),
            message_id=h.gen_message_id(),
            subject=subject,
            text=tmpl.render(tmpl_context),
            cc=cc)
        self.sent_user_message_times.append(datetime.utcnow())

    def send_user_mention_notification(self, mentioned_by, artifact):
        """Send user mention notification to {self} user.
        """
        tmpl = g.jinja2_env.get_template(
            'allura:templates/mail/usermentions_email.md')
        subject = '[%s:%s] Your name was mentioned' % (
            c.project.shortname,
            c.app.config.options.mount_point)
        item_url = artifact.url()
        if artifact.type_s == 'Post':
            item_url = artifact.url_paginated()
        tmpl_context = {
            'site_domain': config['domain'],
            'base_url': config['base_url'],
            'user': c.user,
            'artifact_link': h.absurl(item_url),
            'artifact_linktext': artifact.link_text(),
            'mentioned_by': mentioned_by
        }
        allura.tasks.mail_tasks.sendsimplemail.post(
            toaddr=self.get_pref('email_address'),
            fromaddr=g.noreply,
            reply_to=g.noreply,
            message_id=h.gen_message_id(),
            subject=subject,
            text=tmpl.render(tmpl_context))

    @property
    def activity_name(self):
        return self.display_name or self.username

    @property
    def activity_extras(self):
        d = ActivityObject.activity_extras.fget(self)
        d.update(icon_url=self.icon_url())
        return d

    @property
    def stats(self):
        # Only available when the userstats tool is installed.
        if 'userstats' in g.entry_points['stats']:
            from forgeuserstats.model.stats import UserStats
            if self.stats_id:
                return UserStats.query.get(_id=self.stats_id)
            return UserStats.create(self)
        else:
            return None

    def get_pref(self, pref_name):
        return plugin.UserPreferencesProvider.get().get_pref(self, pref_name)

    def set_pref(self, pref_name, pref_value):
        return plugin.UserPreferencesProvider.get().set_pref(
            self, pref_name, pref_value)

    def add_multivalue_pref(self, pref_name, pref_data):
        return plugin.UserPreferencesProvider.get().add_multivalue_pref(
            self, pref_name, pref_data)

    def remove_multivalue_pref(self, pref_name, pref_data):
        return plugin.UserPreferencesProvider.get().remove_multivalue_pref(
            self, pref_name, pref_data)

    def get_localized_availability(self, tz_name):
        """Return the user's availability slots converted from the user's
        own timezone into ``tz_name``, splitting slots that cross midnight
        into two entries."""
        week_day = ['Monday', 'Tuesday', 'Wednesday', 'Thursday',
                    'Friday', 'Saturday', 'Sunday']
        avail = self.get_availability_timeslots()
        usertimezone = timezone(self.get_pref('timezone') or 'UTC')
        chosentimezone = timezone(tz_name)
        retlist = []
        for t in avail:
            today = datetime.today()
            start = datetime(
                today.year, today.month, today.day,
                t['start_time'].hour, t['start_time'].minute, 0)
            end = datetime(
                today.year, today.month, today.day,
                t['end_time'].hour, t['end_time'].minute, 0)
            loctime1 = usertimezone.localize(start)
            loctime2 = usertimezone.localize(end)
            convtime1 = loctime1.astimezone(chosentimezone)
            convtime2 = loctime2.astimezone(chosentimezone)
            # The conversion may shift the slot across a day boundary.
            dif_days_start = convtime1.weekday() - today.weekday()
            index = (week_day.index(t['week_day']) + dif_days_start) % 7
            week_day_start = week_day[index]
            week_day_end = week_day[index]
            if week_day_start == week_day_end:
                retlist.append(dict(
                    week_day=week_day_start,
                    start_time=convtime1.time(),
                    end_time=convtime2.time()))
            else:
                # Slot crosses midnight: emit one entry up to 23:59 and a
                # second starting at 00:00.
                retlist.append(dict(
                    week_day=week_day_start,
                    start_time=convtime1.time(),
                    end_time=time(23, 59)))
                retlist.append(dict(
                    week_day=week_day_end,
                    start_time=time(0, 0),
                    end_time=convtime2.time()))
        return sorted(
            retlist,
            key=lambda k: (week_day.index(k['week_day']), k['start_time']))

    def get_skills(self):
        """Resolve skill records into TroveCategory objects with level and
        comment."""
        from allura.model.project import TroveCategory
        retval = []
        for el in self.skills:
            d = dict(
                skill=TroveCategory.query.get(_id=el["category_id"]),
                level=el.level,
                comment=el.comment)
            retval.append(d)
        return retval

    def get_availability_timeslots(self):
        """Return availability entries with h/m dicts converted to
        datetime.time objects."""
        retval = []
        for el in self.get_pref('availability'):
            start, end = (el.get('start_time'), el.get('end_time'))
            (starth, startm) = (start.get('h'), start.get('m'))
            (endh, endm) = (end.get('h'), end.get('m'))
            newdict = dict(
                week_day=el.get('week_day'),
                start_time=time(starth, startm, 0),
                end_time=time(endh, endm, 0))
            retval.append(newdict)
        return retval

    def get_inactive_periods(self, include_past_periods=False):
        retval = []
        for el in self.inactiveperiod:
            d1, d2 = (el.get('start_date'), el.get('end_date'))
            newdict = dict(start_date=d1, end_date=d2)
            if include_past_periods or newdict['end_date'] > datetime.today():
                retval.append(newdict)
        return retval

    def url(self):
        '''
        Return the URL (relative to root domain) for this user's user-project.
        This includes any special handling via the :class:`~allura.lib.plugin.AuthenticationProvider` to determine
        the proper user-project name
        '''
        return plugin.AuthenticationProvider.get(request).user_project_url(self)

    @memoize
    def icon_url(self, gravatar_default_url=None, return_more=False):
        """Return the user's avatar URL, trying the user-project icon,
        then gravatar, then a configured default."""
        icon_url = None
        try:
            private_project = self.private_project()
        except Exception:
            log.warn('Error getting/creating user-project for %s',
                     self.username, exc_info=True)
            private_project = None
        icon_source = None
        if private_project and private_project.icon:
            icon_url = config.get('static.icon_base', '') + \
                self.url() + 'user_icon'
            icon_source = 'local'
        elif self.preferences.email_address:
            gravatar_args = {}
            if gravatar_default_url:
                gravatar_args['d'] = gravatar_default_url
            icon_url = g.gravatar(
                self.preferences.email_address, **gravatar_args)
            icon_source = 'gravatar'
        elif config.get('default_avatar_image'):
            icon_url = config['default_avatar_image']
            icon_source = 'default'
        if return_more:
            return icon_url, private_project, icon_source
        else:
            return icon_url

    @classmethod
    def upsert(cls, username):
        """Get-or-create a user by username, tolerating a concurrent
        create (duplicate key) race."""
        u = cls.query.get(username=username)
        if u is not None:
            return u
        try:
            u = cls(username=username)
            session(u).flush(u)
        except pymongo.errors.DuplicateKeyError:
            session(u).expunge(u)
            u = cls.query.get(username=username)
        return u

    @classmethod
    def by_email_address(cls, addr):
        """Return the user owning a confirmed email address, or None.
        Logs a warning and picks the first match if several users claim
        the same confirmed address."""
        addrs = EmailAddress.find(dict(email=addr, confirmed=True))
        users = [ea.claimed_by_user() for ea in addrs]
        users = [u for u in users if u is not None]
        if len(users) > 1:
            log.warn('Multiple active users matching confirmed email %s %s. '
                     'Using first one', [u.username for u in users], addr)
        return users[0] if len(users) > 0 else None

    @classmethod
    def by_username(cls, name):
        """Return the user with this username; falls back to the auth
        provider, and to the anonymous user for an empty name."""
        if not name:
            return cls.anonymous()
        user = cls.query.get(username=name)
        if user:
            return user
        return plugin.AuthenticationProvider.get(request).by_username(name)

    def get_tool_data(self, tool, key, default=None):
        # BUG FIX: honor the `default` argument; the previous code passed a
        # literal None to dict.get(), silently ignoring callers' defaults.
        return self.tool_data.get(tool, {}).get(key, default)

    def set_tool_data(self, tool, **kw):
        d = self.tool_data.setdefault(tool, {})
        d.update(kw)
        # Mark the document dirty; mutating the nested dict alone is not
        # picked up by ming.
        state(self).soil()

    def address_object(self, addr):
        return EmailAddress.get(email=addr, claimed_by_user_id=self._id)

    def claim_address(self, email_address):
        """Associate an email address with this user, creating the
        EmailAddress record if needed."""
        addr = EmailAddress.canonical(email_address)
        email_addr = EmailAddress.create(addr)
        if email_addr:
            email_addr.claimed_by_user_id = self._id
            if addr not in self.email_addresses:
                self.email_addresses.append(addr)
            session(email_addr).flush(email_addr)
            return email_addr

    @classmethod
    def register(cls, doc, make_project=True):
        """Register a new user through the auth provider, optionally
        creating their user-project."""
        from allura import model as M
        auth_provider = plugin.AuthenticationProvider.get(request)
        user = auth_provider.register_user(doc)
        user.set_pref('mention_notifications', True)
        if user and 'display_name' in doc:
            user.set_pref('display_name', doc['display_name'])
        if user:
            g.statsUpdater.newUser(user)
        if user and make_project:
            n = M.Neighborhood.query.get(name='Users')
            n.register_project(auth_provider.user_project_shortname(user),
                               user=user, user_project=True)
        return user

    @LazyProperty
    def neighborhood(self):
        from allura import model as M
        return M.Neighborhood.query.get(name='Users')

    def private_project(self):
        '''
        Returns the personal user-project for the user
        '''
        if self.disabled or self.pending:
            return None

        from allura import model as M
        n = self.neighborhood
        auth_provider = plugin.AuthenticationProvider.get(request)
        project_shortname = auth_provider.user_project_shortname(self)
        p = M.Project.query.get(
            shortname=project_shortname, neighborhood_id=n._id)
        if p and p.deleted:
            # really delete it, since registering a new project would conflict
            # with the "deleted" one
            log.info(
                'completely deleting user project (was already flagged as deleted) %s',
                project_shortname)
            p.delete()
            ThreadLocalORMSession.flush_all()
            p = None
        if not p and not self.is_anonymous():
            # create user-project on demand if it is missing
            p = n.register_project(
                project_shortname, user=self, user_project=True)
        return p

    @property
    def script_name(self):
        return '/u/' + self.username + '/'

    def my_projects(self):
        """Yieldable query of this user's projects; NOTE: returns None
        (not an empty iterable) for the anonymous user."""
        if self.is_anonymous():
            return
        roles = g.credentials.user_roles(user_id=self._id)
        # filter out projects to which the user belongs to no named groups (i.e., role['roles'] is empty)
        projects = [r['project_id'] for r in roles if r['roles']]
        from .project import Project
        return Project.query.find({'_id': {'$in': projects},
                                   'deleted': False}).sort(
            'name', pymongo.ASCENDING)

    def my_projects_by_role_name(self, role_name):
        """
        Return only projects for which user has that role.
        """
        if self.is_anonymous():
            return []
        reaching_role_ids = list(
            g.credentials.user_roles(user_id=self._id).reaching_ids_set)
        reaching_roles = ProjectRole.query.find(
            {'_id': {'$in': reaching_role_ids}, 'name': role_name})
        projects = [r['project_id'] for r in reaching_roles]
        from .project import Project
        return Project.query.find({'_id': {'$in': projects},
                                   'deleted': False}).all()

    def my_merge_requests(self):
        # NOTE: returns None for the anonymous user, mirroring my_projects.
        if self.is_anonymous():
            return
        from .repository import MergeRequest
        return MergeRequest.query.find({'creator_id': self._id}).sort(
            'mod_date', pymongo.DESCENDING)

    def set_password(self, new_password):
        return plugin.AuthenticationProvider.get(request).set_password(
            self, None, new_password)

    @classmethod
    def anonymous(cls):
        # The anonymous user is the one whose _id is None.
        return User.query.get(_id=None)

    def is_anonymous(self):
        return self._id is None or self.username == ''

    def email_address_header(self):
        """RFC 2047-safe '"Display Name" <addr>' header for this user."""
        h = header.Header()
        h.append('"%s" ' % self.get_pref('display_name'))
        h.append('<%s>' % self.get_pref('email_address'))
        return h

    def update_notifications(self):
        return plugin.AuthenticationProvider.get(
            request).update_notifications(self)

    @classmethod
    def withskill(cls, skill):
        return cls.query.find({"skills.category_id": skill._id})

    def __json__(self):
        return dict(
            username=self.username,
            name=self.display_name,
            url=h.absurl(self.url()),
        )

    def registration_date(self):
        p = plugin.AuthenticationProvider.get(request)
        d = p.user_registration_date(self)
        # provider's user_registration_date returns aware datetime (in UTC)
        # but we're using naive UTC time everywhere
        d = datetime.utcfromtimestamp(calendar.timegm(d.utctimetuple()))
        return d
class Qa(MappedEntity):
    """A question/answer entity.

    A Qa has a type (free text, single choice, or multiple choice) and can
    auto-generate Precondition filters and an Output document from itself.
    """

    class TYPES:
        # Closed set of question types; QA_TYPE below mirrors it for the schema
        TEXT = u'text'
        SINGLE = u'single'
        MULTI = u'multi'

    QA_TYPE = [TYPES.TEXT, TYPES.SINGLE, TYPES.MULTI]

    class __mongometa__:
        session = DBSession
        name = 'qas'
        indexes = [
            ('title', ),
            ('_owner', ),
            ('_workspace', ),
            ('type', 'public', ),
            ('hash', ),
        ]
        extensions = [TriggerExtension]

    def custom_title(self):
        """Render the title as a link to the edit page (used by table views);
        auto-generated questions get the 'bot' CSS class."""
        url = tg.url('/qa/edit', params=dict(_id=self._id, workspace=self._workspace))
        auto = 'bot' if self.auto_generated else ''
        return Markup(
            "<span class='%s'></span><a href='%s' class='%s'>%s</a>"
            % (self.status, url, auto, self.title))

    # Column renderers for tabular display
    __ROW_COLUM_CONVERTERS__ = {
        'title': custom_title,
    }

    _parent_precondition = ForeignIdProperty('Precondition')
    parent_precondition = RelationProperty('Precondition')

    question = FieldProperty(s.String, required=True)
    tooltip = FieldProperty(s.String, required=False)
    link = FieldProperty(s.String, required=False)
    type = FieldProperty(s.OneOf(*QA_TYPE), required=True)
    # For choice questions: the list of possible answers (schema is Anything)
    answers = FieldProperty(s.Anything)

    @property
    def entity(self):
        # Entity discriminator string used by export/serialization
        return 'qa'

    @property
    def is_text(self):
        return self.type == self.TYPES.TEXT

    @property
    def is_single(self):
        return self.type == self.TYPES.SINGLE

    @property
    def is_multi(self):
        return self.type == self.TYPES.MULTI

    @property
    def dependencies(self):
        # Filters and outputs that reference this Qa
        # (dependent_filters/dependent_outputs presumably come from MappedEntity — TODO confirm)
        return self.dependent_filters() + self.dependent_outputs()

    def update_dependencies(self, old):
        """Replace the old hash with the current one in every Output that
        references it, recursing into each output's own dependencies, then
        regenerate this Qa's output."""
        from ksweb.model import Output
        outputs = Output.query.find({'$text': {'$search': old}}).all()
        for o in outputs:
            old_hash = o.hash
            o.html = o.html.replace(old, self.hash)
            DBSession.flush(o)
            o.update_dependencies(old_hash)
        self.generate_output_from()

    def __get_common_fields(self, **kwargs):
        # Fields shared by every auto-generated entity derived from this Qa
        common = dict(_owner=self._owner,
                      _workspace=self._workspace,
                      public=self.public,
                      visible=self.visible)
        common.update(**kwargs)
        return common

    def __generate_generic_filter_from(self, title, **kwargs):
        """Upsert an auto-generated Precondition (keyed by title) carrying the
        common fields plus whatever the caller passes in kwargs."""
        from ksweb.model import Precondition
        common = self.__get_common_fields(auto_generated=True,
                                          status=Precondition.STATUS.UNREAD)
        common.update(**kwargs)
        common.update({'title': title})
        return Precondition.upsert({'title': title}, common)

    def generate_text_filter_from(self):
        """Create the 'was compiled' simple filter for a free-text question
        (condition matches any non-empty answer)."""
        from ksweb.model import Precondition
        return self.__generate_generic_filter_from(
            title=_(u'%s \u21d2 was compiled' % self.title),
            type=Precondition.TYPES.SIMPLE,
            condition=[self._id, ""])

    def generate_filter_answer_from(self, answer):
        """Create a simple filter matching one specific answer of this Qa."""
        from ksweb.model import Precondition
        return self.__generate_generic_filter_from(
            title=u'%s \u21d2 %s' % (self.title, answer),
            type=Precondition.TYPES.SIMPLE,
            condition=[self._id, answer],
        )

    def invalidate_outdated_filters(self):
        """Mark as INCOMPLETE every simple dependent filter whose condition
        references an answer that no longer exists on this Qa."""
        simple_filters = [f for f in self.dependent_filters() if f.is_simple]
        broken_filters = [
            f for f in simple_filters
            for c in f.condition
            if isinstance(c, str) and c not in self.answers
        ]
        for f in broken_filters:
            from ksweb.model import Precondition
            f.status = Precondition.STATUS.INCOMPLETE

    def generate_filters_from(self):
        """(Re)generate the aggregate filter for this Qa.

        Text questions get a single 'was compiled' simple filter; choice
        questions get one simple filter per answer OR-ed together into an
        advanced filter.

        NOTE(review): assumes self.answers is non-empty for choice questions —
        an empty list would make `del composed_condition[-1]` raise IndexError.
        """
        if self.is_text:
            return self.generate_text_filter_from()

        composed_condition = []
        for __ in self.answers:
            composed_condition.append(self.generate_filter_answer_from(__)._id)
            composed_condition.append('or')
        # drop the trailing 'or' operator
        del composed_condition[-1]

        self.invalidate_outdated_filters()
        from ksweb.model import Precondition
        return self.__generate_generic_filter_from(
            title=_(u'%s \u21d2 was compiled' % self.title),
            type=Precondition.TYPES.ADVANCED,
            condition=composed_condition)

    def generate_output_from(self):
        """Upsert the auto-generated Output bound to this Qa's aggregate filter;
        its html embeds the answer via the '@{hash}' placeholder."""
        from ksweb.model import Output
        _filter = self.generate_filters_from()
        common = self.__get_common_fields()
        title = u'%s \u21d2 output' % self.title
        common.update({
            '_precondition': _filter._id,
            'html': '@{%s}' % self.hash,
            'title': title
        })
        o = Output.upsert({'title': title}, common)
        o.auto_generated = True
        o.status = Output.STATUS.UNREAD
        return o

    def export_items(self):
        # This Qa plus (recursively) its parent precondition's export set
        items = set([self])
        if self.parent_precondition:
            items.update(self.parent_precondition.export_items())
        return items

    def exportable_dict(self):
        # Replace the parent ObjectId reference with a portable hash on export
        editable = super().exportable_dict()
        if self.parent_precondition:
            editable['_parent_precondition'] = self.parent_precondition.hash
        return editable
class Mailbox(MappedClass):
    '''Holds a queue of notifications for an artifact, or a user (webflash
    messages) for a subscriber.

    A Mailbox represents one subscription: (user, project, tool, optional
    artifact, optional topic).  Notification _ids are pushed onto ``queue``
    by :meth:`deliver` and flushed out by :meth:`fire_ready` / :meth:`fire`
    according to the subscription ``type`` (direct, digest, summary, flash).
    '''
    class __mongometa__:
        session = main_orm_session
        name = 'mailbox'
        unique_indexes = [
            ('user_id', 'project_id', 'app_config_id',
             'artifact_index_id', 'topic', 'is_flash'),
        ]
        indexes = [
            ('project_id', 'artifact_index_id'),
            ('is_flash', 'user_id'),
            ('type', 'next_scheduled'),  # for q_digest
            ('type', 'queue_empty'),  # for q_direct
            # for deliver()
            ('project_id', 'app_config_id', 'artifact_index_id', 'topic'),
        ]

    _id = FieldProperty(S.ObjectId)
    # Defaults come from the request-bound thread-local context `c`
    user_id = ForeignIdProperty('User', if_missing=lambda: c.user._id)
    project_id = ForeignIdProperty('Project', if_missing=lambda: c.project._id)
    app_config_id = ForeignIdProperty('AppConfig', if_missing=lambda: c.app.config._id)

    # Subscription filters
    artifact_title = FieldProperty(str)
    artifact_url = FieldProperty(str)
    artifact_index_id = FieldProperty(str)
    topic = FieldProperty(str)

    # Subscription type
    is_flash = FieldProperty(bool, if_missing=False)
    type = FieldProperty(S.OneOf('direct', 'digest', 'summary', 'flash'))
    # For digest/summary subscriptions: fire every `n` day/week/month units
    frequency = FieldProperty(dict(
        n=int, unit=S.OneOf('day', 'week', 'month')))
    next_scheduled = FieldProperty(datetime, if_missing=datetime.utcnow)
    last_modified = FieldProperty(datetime, if_missing=datetime(2000, 1, 1))

    # a list of notification _id values
    queue = FieldProperty([str])
    queue_empty = FieldProperty(bool)

    project = RelationProperty('Project')
    app_config = RelationProperty('AppConfig')

    @classmethod
    def subscribe(
            cls,
            user_id=None, project_id=None, app_config_id=None,
            artifact=None, topic=None,
            type='direct', n=1, unit='day'):
        """Create (or update) a subscription mailbox.

        A tool-level subscription (artifact=None) supersedes artifact-level
        ones: subscribing to the tool removes the per-artifact mailboxes, and
        no artifact subscription is created while a tool subscription exists.
        """
        if user_id is None:
            user_id = c.user._id
        if project_id is None:
            project_id = c.project._id
        if app_config_id is None:
            app_config_id = c.app.config._id
        # An existing tool-wide subscription already covers everything
        tool_already_subscribed = cls.query.get(user_id=user_id,
                                                project_id=project_id,
                                                app_config_id=app_config_id,
                                                artifact_index_id=None)
        if tool_already_subscribed:
            return
        if artifact is None:
            artifact_title = 'All artifacts'
            artifact_url = None
            artifact_index_id = None
        else:
            i = artifact.index()
            artifact_title = h.get_first(i, 'title')
            artifact_url = artifact.url()
            artifact_index_id = i['id']
        artifact_already_subscribed = cls.query.get(user_id=user_id,
                                                    project_id=project_id,
                                                    app_config_id=app_config_id,
                                                    artifact_index_id=artifact_index_id)
        if artifact_already_subscribed:
            return
        d = dict(user_id=user_id, project_id=project_id,
                 app_config_id=app_config_id,
                 artifact_index_id=artifact_index_id,
                 topic=topic)
        sess = session(cls)
        try:
            mbox = cls(
                type=type, frequency=dict(n=n, unit=unit),
                artifact_title=artifact_title,
                artifact_url=artifact_url,
                **d)
            sess.flush(mbox)
        except pymongo.errors.DuplicateKeyError:
            # Lost a race with another request creating the same mailbox:
            # update the existing document instead
            sess.expunge(mbox)
            mbox = cls.query.get(**d)
            mbox.artifact_title = artifact_title
            mbox.artifact_url = artifact_url
            mbox.type = type
            mbox.frequency.n = n
            mbox.frequency.unit = unit
            sess.flush(mbox)
        if not artifact_index_id:
            # Unsubscribe from individual artifacts when subscribing to the tool
            for other_mbox in cls.query.find(dict(
                    user_id=user_id, project_id=project_id,
                    app_config_id=app_config_id)):
                if other_mbox is not mbox:
                    other_mbox.delete()

    @classmethod
    def unsubscribe(
            cls,
            user_id=None, project_id=None, app_config_id=None,
            artifact_index_id=None, topic=None):
        """Remove the mailbox matching the given subscription key (defaults
        come from the thread-local context)."""
        if user_id is None:
            user_id = c.user._id
        if project_id is None:
            project_id = c.project._id
        if app_config_id is None:
            app_config_id = c.app.config._id
        cls.query.remove(dict(
            user_id=user_id,
            project_id=project_id,
            app_config_id=app_config_id,
            artifact_index_id=artifact_index_id,
            topic=topic))

    @classmethod
    def subscribed(
            cls, user_id=None, project_id=None, app_config_id=None,
            artifact=None, topic=None):
        """Return True if a mailbox exists for this (user, tool, artifact)."""
        if user_id is None:
            user_id = c.user._id
        if project_id is None:
            project_id = c.project._id
        if app_config_id is None:
            app_config_id = c.app.config._id
        if artifact is None:
            artifact_index_id = None
        else:
            i = artifact.index()
            artifact_index_id = i['id']
        return cls.query.find(dict(
            user_id=user_id,
            project_id=project_id,
            app_config_id=app_config_id,
            artifact_index_id=artifact_index_id)).count() != 0

    @classmethod
    def deliver(cls, nid, artifact_index_id, topic):
        '''Called in the notification message handler to deliver notification
        IDs to the appropriate mailboxes.  Atomically appends the nids
        to the appropriate mailboxes.
        '''
        d = {
            'project_id': c.project._id,
            'app_config_id': c.app.config._id,
            # None matches tool-wide / topic-wide subscriptions
            'artifact_index_id': {'$in': [None, artifact_index_id]},
            'topic': {'$in': [None, topic]}
        }
        mboxes = cls.query.find(d).all()
        log.debug('Delivering notification %s to mailboxes [%s]',
                  nid, ', '.join(str(m._id) for m in mboxes))
        for mbox in mboxes:
            try:
                mbox.query.update(
                    {'$push': dict(queue=nid),
                     '$set': dict(last_modified=datetime.utcnow(),
                                  queue_empty=False),
                     })
                # Make sure the mbox doesn't stick around to be flush()ed
                session(mbox).expunge(mbox)
            except Exception:
                # log error but try to keep processing, lest all the other eligible
                # mboxes for this notification get skipped and lost forever
                log.exception(
                    'Error adding notification: %s for artifact %s on project %s to user %s',
                    nid, artifact_index_id, c.project._id, mbox.user_id)

    @classmethod
    def fire_ready(cls):
        '''Fires all direct subscriptions with notifications as well as
        all summary & digest subscriptions with notifications that are ready.
        Clears the mailbox queue.
        '''
        now = datetime.utcnow()
        # Queries to find all matching subscription objects
        q_direct = dict(
            type='direct',
            queue_empty=False,
        )
        if MAILBOX_QUIESCENT:
            # only fire direct mailboxes that have been quiet for a while,
            # so rapid-fire edits coalesce into one notification
            q_direct['last_modified'] = {'$lt': now - MAILBOX_QUIESCENT}
        q_digest = dict(
            type={'$in': ['digest', 'summary']},
            next_scheduled={'$lt': now})

        def find_and_modify_direct_mbox():
            # Atomically claim one ready direct mailbox, clearing its queue;
            # returns the pre-update document (new=False) so we still see it
            return cls.query.find_and_modify(
                query=q_direct,
                update={'$set': dict(
                    queue=[],
                    queue_empty=True,
                )},
                new=False)

        for mbox in take_while_true(find_and_modify_direct_mbox):
            try:
                mbox.fire(now)
            except Exception:
                log.exception('Error firing mbox: %s with queue: [%s]',
                              str(mbox._id), ', '.join(mbox.queue))
                # re-raise so we don't keep (destructively) trying to process mboxes
                raise

        for mbox in cls.query.find(q_digest):
            next_scheduled = now
            if mbox.frequency.unit == 'day':
                next_scheduled += timedelta(days=mbox.frequency.n)
            elif mbox.frequency.unit == 'week':
                next_scheduled += timedelta(days=7 * mbox.frequency.n)
            elif mbox.frequency.unit == 'month':
                next_scheduled += timedelta(days=30 * mbox.frequency.n)
            mbox = cls.query.find_and_modify(
                query=dict(_id=mbox._id),
                update={'$set': dict(
                    next_scheduled=next_scheduled,
                    queue=[],
                    queue_empty=True,
                )},
                new=False)
            mbox.fire(now)

    def fire(self, now):
        '''
        Send all notifications that this mailbox has enqueued.
        '''
        notifications = Notification.query.find(dict(_id={'$in': self.queue}))
        notifications = notifications.all()
        if len(notifications) != len(self.queue):
            # BUGFIX: _ids are ObjectIds, not str — joining them directly
            # raised TypeError inside the logging call
            log.error('Mailbox queue error: Mailbox %s queued [%s], found [%s]',
                      str(self._id), ', '.join(self.queue),
                      ', '.join(str(n._id) for n in notifications))
        else:
            log.debug('Firing mailbox %s notifications [%s], found [%s]',
                      str(self._id), ', '.join(self.queue),
                      ', '.join(str(n._id) for n in notifications))

        if self.type == 'direct':
            ngroups = defaultdict(list)
            for n in notifications:
                try:
                    if n.topic == 'message':
                        # Messages must be sent individually so they can be replied
                        # to individually
                        n.send_direct(self.user_id)
                    else:
                        key = (n.subject, n.from_address,
                               n.reply_to_address, n.author_id)
                        ngroups[key].append(n)
                except Exception:
                    # log error but keep trying to deliver other notifications,
                    # lest the other notifications (which have already been removed
                    # from the mbox's queue in mongo) be lost
                    log.exception(
                        'Error sending notification: %s to mbox %s (user %s)',
                        n._id, self._id, self.user_id)
            # Accumulate messages from same address with same subject
            # BUGFIX: dict.iteritems() does not exist in Python 3 — use items()
            for (subject, from_address, reply_to_address, author_id), ns in ngroups.items():
                try:
                    if len(ns) == 1:
                        ns[0].send_direct(self.user_id)
                    else:
                        Notification.send_digest(
                            self.user_id, from_address, subject, ns,
                            reply_to_address)
                except Exception:
                    # log error but keep trying to deliver other notifications,
                    # lest the other notifications (which have already been removed
                    # from the mbox's queue in mongo) be lost
                    log.exception(
                        'Error sending notifications: [%s] to mbox %s (user %s)',
                        ', '.join(str(n._id) for n in ns), self._id, self.user_id)
        elif self.type == 'digest':
            Notification.send_digest(
                self.user_id, u'*****@*****.**', 'Digest Email',
                notifications)
        elif self.type == 'summary':
            Notification.send_summary(
                self.user_id, u'*****@*****.**', 'Digest Email',
                notifications)
class Precondition(MappedClass):
    """A filter over Qa answers, either 'simple' (one Qa, one expected
    response) or 'advanced' (a boolean combination of other preconditions)."""

    PRECONDITION_TYPE = [u"simple", u"advanced"]
    # Operators as stored in `condition` for advanced preconditions
    PRECONDITION_OPERATOR = ['and', 'or', 'not', '(', ')']
    # Same operators in evaluable form (positionally parallel to the above)
    PRECONDITION_CONVERTED_OPERATOR = ['&', '|', 'not', '(', ')']

    class __mongometa__:
        session = DBSession
        name = 'preconditions'
        indexes = [
            ('_owner', ),
        ]

    # Column renderers for tabular display (helpers defined elsewhere in the module)
    __ROW_COLUM_CONVERTERS__ = {
        'title': _custom_title,
        'content': _content_preview
    }

    _id = FieldProperty(s.ObjectId)
    _owner = ForeignIdProperty('User')
    owner = RelationProperty('User')

    _category = ForeignIdProperty('Category')
    category = RelationProperty('Category')

    title = FieldProperty(s.String, required=False)
    type = FieldProperty(s.OneOf(*PRECONDITION_TYPE), required=True)
    condition = FieldProperty(s.Anything)
    """
    In case of type: simple the condition is like: [ObjectId('qa'), 'String_response']
    In case of type advanced the condition is like: [ObjectId(precond_1), &, ObjectId(precond_2), | , ObjectId(precond_3)]
    """
    public = FieldProperty(s.Bool, if_missing=True)
    visible = FieldProperty(s.Bool, if_missing=True)

    @classmethod
    def precondition_available_for_user(cls, user_id, workspace=None):
        """Return the user's visible preconditions, optionally restricted to a
        workspace/category, sorted by title."""
        if workspace:
            return cls.query.find({
                '_owner': user_id,
                'visible': True,
                '_category': ObjectId(workspace)
            }).sort('title')
        return cls.query.find({
            '_owner': user_id,
            'visible': True
        }).sort('title')

    @property
    def evaluate(self):
        # NOTE(review): stub — only prints and returns None; no evaluation is
        # actually implemented here
        if self.type == 'simple':
            print("evaluate simple precondition")
            return
        if self.type == 'advanced':
            print("evaluate advanced precondition")
            return

    @property
    def is_simple(self):
        return self.type == 'simple'

    @property
    def response_interested(self):
        """Map every Qa this precondition (transitively) depends on, keyed by
        its _id as a string.

        Example of the return value::

            {
                '5772314bc42d7513bb31e17c': <Qa ... type=u'single' ...>,
                '57723171c42d7513bb31e17d': <Qa ... type=u'multi' ...>
            }

        :return: dict mapping str(Qa._id) -> Qa
        """
        res_dict = {}
        if self.type == 'simple':
            qa = Qa.query.get(_id=self.condition[0])
            res_dict[str(qa._id)] = qa
            if qa.parent_precondition:
                # include Qas the parent filter depends on as well
                res_dict.update(qa.parent_precondition.response_interested)
            return res_dict
        # advanced: condition interleaves precondition ids with operators
        for cond in self.condition:
            if cond in Precondition.PRECONDITION_OPERATOR:
                continue
            else:
                rel_ent = Precondition.query.get(_id=ObjectId(cond))
                print(cond)  # NOTE(review): leftover debug print
                res_dict.update(rel_ent.response_interested)
        return res_dict

    def get_qa(self):
        """Return the Qa a simple precondition points at, or None for
        advanced preconditions."""
        from . import Qa
        if not self.is_simple:
            return None
        return Qa.query.get(_id=ObjectId(self.condition[0]))

    @property
    def simple_text_response(self):
        """
        Used to identify text-type filters that have an (empty) expected
        response, i.e. 'was compiled' filters.
        (Original comment was in Italian: checks text-type filters for a response.)
        :return:
        """
        return self.type == "simple" and self.condition[1] == ""

    @property
    def multiple_choice_response(self):
        # True when this is a simple filter over a multiple-choice Qa
        if self.is_simple:
            qa = self.get_qa()
            return qa.is_multi
        return False

    @property
    def single_choice_response(self):
        # True when this is a simple filter over a single-choice Qa
        if self.is_simple:
            qa = self.get_qa()
            return qa.is_single
        return False

    @property
    def entity(self):
        # Entity discriminator string used by export/serialization
        return 'precondition/simple' if self.is_simple else 'precondition/advanced'

    def __json__(self):
        from ksweb.lib.utils import to_dict
        _dict = to_dict(self)
        _dict['entity'] = self.entity
        return _dict