def pre(self):
    set_extension(request.environ, "json")
    MinimalController.pre(self)
    require_https()

    try:
        access_token = OAuth2AccessToken.get_token(self._get_bearer_token())
        require(access_token)
        require(access_token.check_valid())
        c.oauth2_access_token = access_token
        account = Account._byID36(access_token.user_id, data=True)
        require(account)
        require(not account._deleted)
        c.oauth_user = account
    except RequirementException:
        self._auth_error(401, "invalid_token")

    handler = self._get_action_handler()
    if handler:
        oauth2_perms = getattr(handler, "oauth2_perms", None)
        if oauth2_perms:
            grant = OAuth2Scope(access_token.scope)
            if grant.subreddit_only and c.site.name not in grant.subreddits:
                self._auth_error(403, "insufficient_scope")
            required_scopes = set(oauth2_perms['allowed_scopes'])
            if not (grant.scopes >= required_scopes):
                self._auth_error(403, "insufficient_scope")
        else:
            self._auth_error(400, "invalid_request")
def update_flair_counts():
    flairs = Counter()
    user_ids = []

    sr = Subreddit._byID(g.live_config["thebutton_srid"], data=True)
    raw = AccountsActiveBySR._cf.xget(sr._id36)
    for uid, _ in raw:
        user_ids.append(uid)

    for user_chunk in in_chunks(user_ids, size=100):
        users = Account._byID36(user_chunk, data=True, return_dict=False)
        for user in users:
            flair = user.flair_css_class(sr._id)
            if not flair:
                if user._date < ACCOUNT_CREATION_CUTOFF:
                    flair = "no-press"
                else:
                    flair = "cant-press"
            flairs[flair] += 1

    if 'cheater' in flairs:
        del flairs['cheater']

    sr.flair_counts = sorted(
        flairs.iteritems(),
        key=lambda x: 'z' if x[0] == 'no-press' else x[0],
        reverse=True)
    sr._commit()
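# Illustration only (not part of the job above): the sort key used for
# sr.flair_counts maps "no-press" to "z", and with reverse=True that puts
# "no-press" first, followed by the remaining flair classes in
# reverse-alphabetical order.
from collections import Counter

flairs = Counter({"cant-press": 3, "no-press": 10})
print sorted(
    flairs.iteritems(),
    key=lambda x: 'z' if x[0] == 'no-press' else x[0],
    reverse=True)
# [('no-press', 10), ('cant-press', 3)]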
def _delete_button_flair(user_id36s):
    users = Account._byID36(user_id36s, data=True, return_dict=False)
    for user in users:
        g.log.debug("deleting flair for %s" % user.name)
        setattr(user, 'flair_%s_text' % g.live_config["thebutton_srid"], None)
        setattr(user, 'flair_%s_css_class' % g.live_config["thebutton_srid"],
                None)
        user._commit()
def authenticate_with_token(self):
    set_extension(request.environ, "json")
    set_content_type()
    require_https()
    require_domain(g.oauth_domain)

    try:
        access_token = OAuth2AccessToken.get_token(self._get_bearer_token())
        require(access_token)
        require(access_token.check_valid())
        c.oauth2_access_token = access_token
        account = Account._byID36(access_token.user_id, data=True)
        require(account)
        require(not account._deleted)
        c.oauth_user = account
    except RequirementException:
        self._auth_error(401, "invalid_token")

    handler = self._get_action_handler()
    if handler:
        oauth2_perms = getattr(handler, "oauth2_perms", None)
        if oauth2_perms:
            grant = OAuth2Scope(access_token.scope)
            required = set(oauth2_perms['allowed_scopes'])
            if not grant.has_access(c.site.name, required):
                self._auth_error(403, "insufficient_scope")
            c.oauth_scope = grant
        else:
            self._auth_error(400, "invalid_request")
def authenticate_with_token(self):
    set_extension(request.environ, "json")
    set_content_type()
    require_https()
    require_domain(g.oauth_domain)

    try:
        access_token = OAuth2AccessToken.get_token(self._get_bearer_token())
        require(access_token)
        require(access_token.check_valid())
        c.oauth2_access_token = access_token
        account = Account._byID36(access_token.user_id, data=True)
        require(account)
        require(not account._deleted)
        c.oauth_user = account
    except RequirementException:
        self._auth_error(401, "invalid_token")

    handler = self._get_action_handler()
    if handler:
        oauth2_perms = getattr(handler, "oauth2_perms", None)
        if oauth2_perms:
            grant = OAuth2Scope(access_token.scope)
            required = set(oauth2_perms['allowed_scopes'])
            if not grant.has_access(c.site.name, required):
                self._auth_error(403, "insufficient_scope")
            c.oauth_scope = grant
        else:
            self._auth_error(400, "invalid_request")
def update_flair_counts():
    flairs = Counter()

    sr = Subreddit._byID(g.live_config["thebutton_srid"], data=True)
    # every account with recorded button activity
    user_ids = [ba._id36 for ba in ButtonActivity._all()]

    for user_chunk in in_chunks(user_ids, size=100):
        users = Account._byID36(user_chunk, data=True, return_dict=False)
        for user in users:
            flair = user.flair_css_class(sr._id)
            if not flair:
                if user._date < ACCOUNT_CREATION_CUTOFF:
                    flair = "no-press"
                else:
                    flair = "cant-press"
            flairs[flair] += 1

    if 'cheater' in flairs:
        del flairs['cheater']

    sr.flair_counts = sorted(
        flairs.iteritems(),
        key=lambda x: 'z' if x[0] == 'no-press' else x[0],
        reverse=True)
    sr._commit()
def process_presence_update(msg):
    message_type = msg.delivery_info["routing_key"]
    payload = json.loads(msg.body)

    namespace = payload["namespace"]
    if not namespace.startswith("/robin/"):
        return

    user_id36 = posixpath.basename(namespace)
    room_namespace = posixpath.dirname(namespace)
    room_id = posixpath.basename(room_namespace)

    account = Account._byID36(user_id36, data=True, stale=True)
    try:
        room = RobinRoom._byID(room_id)
    except tdb_cassandra.NotFoundException:
        return

    if not room.is_participant(account):
        return

    presence_type = "join" if message_type == "websocket.connect" else "part"

    websockets.send_broadcast(
        namespace=room_namespace,
        type=presence_type,
        payload={
            "user": account.name,
        },
    )

    if presence_type == "join":
        ParticipantPresenceByRoom.mark_joined(room, account)
    else:
        ParticipantPresenceByRoom.mark_exited(room, account)
def get_details(cls, thing, voters=None):
    from r2.models import Comment, Link
    if isinstance(thing, Link):
        details_cls = VoteDetailsByLink
    elif isinstance(thing, Comment):
        details_cls = VoteDetailsByComment
    else:
        raise ValueError

    voter_id36s = None
    if voters:
        voter_id36s = [voter._id36 for voter in voters]

    try:
        row = details_cls._byID(thing._id36, properties=voter_id36s)
        raw_details = row._values()
    except tdb_cassandra.NotFound:
        return []

    try:
        row = VoterIPByThing._byID(thing._fullname, properties=voter_id36s)
        ips = row._values()
    except tdb_cassandra.NotFound:
        ips = {}

    # look up all the accounts in batches of 100
    account_id36s = set(raw_details.keys())
    accounts = {}
    for id_chunk in in_chunks(account_id36s, size=100):
        accounts.update(Account._byID36(id_chunk, data=True))

    details = []
    for voter_id36, json_data in raw_details.iteritems():
        vote_data = json.loads(json_data)
        vote_data = cls.convert_old_details(vote_data)

        extra_data = vote_data["data"]
        extra_data["ip"] = ips.get(voter_id36)

        vote = Vote(
            user=accounts[voter_id36],
            thing=thing,
            direction=Vote.deserialize_direction(vote_data["direction"]),
            date=datetime.utcfromtimestamp(vote_data["date"]),
            data=extra_data,
            effects=vote_data["effects"],
            get_previous_vote=False,
        )
        details.append(vote)

    details.sort(key=lambda d: d.date)
    return details
def get_details(cls, thing, voters=None):
    from r2.models import Comment, Link
    if isinstance(thing, Link):
        details_cls = VoteDetailsByLink
    elif isinstance(thing, Comment):
        details_cls = VoteDetailsByComment
    else:
        raise ValueError

    voter_id36s = None
    if voters:
        voter_id36s = [voter._id36 for voter in voters]

    try:
        row = details_cls._byID(thing._id36, properties=voter_id36s)
        raw_details = row._values()
    except tdb_cassandra.NotFound:
        return []

    try:
        row = VoterIPByThing._byID(thing._fullname, properties=voter_id36s)
        ips = row._values()
    except tdb_cassandra.NotFound:
        ips = {}

    details = []
    for voter_id36, json_data in raw_details.iteritems():
        data = json.loads(json_data)
        data = cls.convert_old_details(data)
        user = Account._byID36(voter_id36, data=True)
        direction = Vote.deserialize_direction(data.pop("direction"))
        date = datetime.utcfromtimestamp(data.pop("date"))
        effects = data.pop("effects")
        data["ip"] = ips.get(voter_id36)
        vote = Vote(user, thing, direction, date, data, effects,
                    get_previous_vote=False)
        details.append(vote)

    details.sort(key=lambda d: d.date)
    return details
def message_notification_email(data):
    """Queues a system email for a new message notification."""
    from r2.lib.pages import MessageNotificationEmail

    MAX_EMAILS_PER_DAY = 1000
    MESSAGE_THROTTLE_KEY = 'message_notification_emails'

    # If our counter's expired, initialize it again.
    g.cache.add(MESSAGE_THROTTLE_KEY, 0, time=24 * 60 * 60)

    for datum in data.itervalues():
        datum = json.loads(datum)
        user = Account._byID36(datum['to'], data=True)
        comment = Comment._by_fullname(datum['comment'], data=True)

        # In case a user has enabled the preference while it was enabled for
        # them, but we've since turned it off. We need to explicitly state the
        # user because we're not in the context of an HTTP request from them.
        if not feature.is_enabled('orangereds_as_emails', user=user):
            continue

        if g.cache.get(MESSAGE_THROTTLE_KEY) > MAX_EMAILS_PER_DAY:
            raise Exception(
                'Message notification emails: safety limit exceeded!')

        mac = generate_notification_email_unsubscribe_token(
            datum['to'], user_email=user.email,
            user_password_hash=user.password)
        base = g.https_endpoint or g.origin
        unsubscribe_link = base + '/mail/unsubscribe/%s/%s' % (datum['to'], mac)

        templateData = {
            'sender_username': datum.get('from', ''),
            'comment': comment,
            'permalink': datum['permalink'],
            'unsubscribe_link': unsubscribe_link,
        }
        _system_email(
            user.email,
            MessageNotificationEmail(**templateData).render(style='email'),
            Email.Kind.MESSAGE_NOTIFICATION,
            from_address=g.notification_email)

        g.stats.simple_event('email.message_notification.queued')
        g.cache.incr(MESSAGE_THROTTLE_KEY)
def process_waitinglist(msg):
    user_id36 = msg.body
    user = Account._byID36(user_id36, data=True, stale=True)
    if RobinRoom.get_room_for_user(user):
        print "%s already in room" % user.name
        return

    with g.make_lock("robin_room", "global"):
        current_room_id = g.gencache.get("current_robin_room")
        if not current_room_id:
            current_room = make_new_room()
        else:
            try:
                current_room = RobinRoom._byID(current_room_id)
            except tdb_cassandra.NotFoundException:
                current_room_id = None
                current_room = make_new_room()

            if not current_room.is_alive or current_room.is_continued:
                current_room_id = None
                current_room = make_new_room()

        current_room.add_participants([user])
        print "added %s to %s" % (user.name, current_room.id)

        if current_room_id:
            g.gencache.delete("current_robin_room")
            current_room.persist_computed_name()
            websockets.send_broadcast(
                namespace="/robin/" + current_room.id,
                type="updated_name",
                payload={
                    "room_name": current_room.name,
                },
            )
        else:
            g.gencache.set("current_robin_room", current_room.id)
def process_waitinglist(msg):
    user_id36 = msg.body
    user = Account._byID36(user_id36, data=True, stale=True)
    if RobinRoom.get_room_for_user(user):
        print "%s already in room" % user.name
        return

    with g.make_lock("robin_room", "global"):
        current_room_id = g.cache.get("current_robin_room")
        if not current_room_id:
            current_room = make_new_room()
        else:
            try:
                current_room = RobinRoom._byID(current_room_id)
            except tdb_cassandra.NotFoundException:
                current_room_id = None
                current_room = make_new_room()

            if not current_room.is_alive or current_room.is_continued:
                current_room_id = None
                current_room = make_new_room()

        current_room.add_participants([user])
        print "added %s to %s" % (user.name, current_room.id)

        if current_room_id:
            g.cache.delete("current_robin_room")
            current_room.persist_computed_name()
            websockets.send_broadcast(
                namespace="/robin/" + current_room.id,
                type="updated_name",
                payload={
                    "room_name": current_room.name,
                },
            )
        else:
            g.cache.set("current_robin_room", current_room.id)
def generate_notification_email_unsubscribe_token(user_id36, user_email=None,
                                                  user_password_hash=None):
    """Generate a token used for one-click unsubscribe links for notification
    emails.

    user_id36: A base36-encoded user id.
    user_email: The user's email. Looked up if not provided.
    user_password_hash: The hash of the user's password. Looked up if not
        provided.
    """
    import hashlib
    import hmac

    if (not user_email) or (not user_password_hash):
        user = Account._byID36(user_id36, data=True)
        if not user_email:
            user_email = user.email
        if not user_password_hash:
            user_password_hash = user.password

    return hmac.new(
        g.secrets['email_notifications'],
        user_id36 + user_email + user_password_hash,
        hashlib.sha256).hexdigest()
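# A minimal sketch (not part of the code above) of how the unsubscribe
# endpoint might validate such a token: recompute the HMAC for the user and
# compare it against the MAC embedded in the link. The helper below is
# hypothetical; it assumes Python 2.7.7+, where hmac.compare_digest is
# available for constant-time comparison.
import hmac

def is_valid_unsubscribe_token(user_id36, supplied_mac):
    expected = generate_notification_email_unsubscribe_token(user_id36)
    # constant-time comparison avoids leaking the expected MAC via timing
    return hmac.compare_digest(expected, supplied_mac)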
def message_notification_email(data):
    """Queues a system email for a new message notification."""
    from r2.lib.pages import MessageNotificationEmail

    timer_start = time.time()

    MAX_EMAILS_PER_USER = 30
    MAX_MESSAGES_PER_BATCH = 5
    total_messages_sent = 0
    inbox_item_lookup_count = 0

    unique_user_list = make_message_dict_unique(data)
    g.log.info(
        "there are %s users for this batch of emails" % len(unique_user_list))

    for datum in unique_user_list.itervalues():
        user = Account._byID36(datum['to'], data=True)
        g.log.info('user fullname: %s' % user._fullname)

        # In case a user has enabled the preference while it was enabled for
        # them, but we've since turned it off. We need to explicitly state the
        # user because we're not in the context of an HTTP request from them.
        if not feature.is_enabled('orangereds_as_emails', user=user):
            g.log.info('feature not enabled for user: %s' % user._fullname)
            continue

        # Don't send more than MAX_EMAILS_PER_USER per user per day
        user_notification_ratelimit = SimpleRateLimit(
            name="email_message_notification_%s" % user._id36,
            seconds=int(datetime.timedelta(days=1).total_seconds()),
            limit=MAX_EMAILS_PER_USER,
        )
        if not user_notification_ratelimit.check():
            g.log.info('message blocked at user_notification_ratelimit: %s' %
                       user_notification_ratelimit)
            continue

        # Get all new messages that haven't been emailed
        inbox_items = get_unread_and_unemailed(user)
        inbox_item_lookup_count += 1
        if not inbox_items:
            g.log.info('no inbox items found for %s' % user._fullname)
            continue

        newest_inbox_rel = inbox_items[-1][0]
        oldest_inbox_rel = inbox_items[0][0]

        now = datetime.datetime.now(g.tz)
        start_date = datetime.datetime.strptime(
            datum['start_date'], "%Y-%m-%d %H:%M:%S").replace(tzinfo=g.tz)

        # If messages are still being queued within the cooling period or
        # messages have been queued past the max delay, then keep waiting
        # a little longer to batch all of the messages up
        if (start_date != newest_inbox_rel._date and
                now < newest_inbox_rel._date + NOTIFICATION_EMAIL_COOLING_PERIOD and
                now < oldest_inbox_rel._date + NOTIFICATION_EMAIL_MAX_DELAY):
            g.log.info('messages still being batched for: %s' % user._fullname)
            continue

        messages = []
        message_count = 0
        more_unread_messages = False
        non_preview_usernames = set()

        # Batch messages to email starting with older messages
        for inbox_rel, message in inbox_items:
            # Get sender_name, replacing with display_author if it exists
            g.log.info('user fullname: %s, message fullname: %s' % (
                user._fullname, message._fullname))
            sender_name = get_sender_name(message)

            if message_count >= MAX_MESSAGES_PER_BATCH:
                # prevent duplicate usernames for template display
                non_preview_usernames.add(sender_name)
                more_unread_messages = True
            else:
                link = None
                parent = None
                if isinstance(message, Comment):
                    permalink = message.make_permalink_slow(context=1,
                                                            force_domain=True)
                    if message.parent_id:
                        parent = Comment._byID(message.parent_id, data=True)
                    else:
                        link = Link._byID(message.link_id, data=True)
                else:
                    permalink = message.make_permalink(force_domain=True)
                message_type = get_message_type(message, parent, user, link)
                messages.append({
                    "author_name": sender_name,
                    "message_type": message_type,
                    "body": message.body,
                    "date": long_datetime(message._date),
                    "permalink": permalink,
                    "id": message._id,
                    "fullname": message._fullname,
                    "subject": getattr(message, 'subject', ''),
                })

            inbox_rel.emailed = True
            inbox_rel._commit()
            message_count += 1

        mac = generate_notification_email_unsubscribe_token(
            datum['to'], user_email=user.email,
            user_password_hash=user.password)
        base = g.https_endpoint or g.origin
        unsubscribe_link = base + '/mail/unsubscribe/%s/%s' % (datum['to'], mac)
        inbox_url = base + '/message/inbox'

        # unique email_hash for emails, to be used in utm tags
        id_str = ''.join(str(message['id']) for message in messages)
        email_hash = hashlib.sha1(id_str).hexdigest()

        base_utm_query = {
            'utm_name': email_hash,
            'utm_source': 'email',
            'utm_medium': 'message_notification',
        }

        non_preview_usernames_str = generate_non_preview_usernames_str(
            non_preview_usernames)

        templateData = {
            'messages': messages,
            'unsubscribe_link': unsubscribe_link,
            'more_unread_messages': more_unread_messages,
            'message_count': message_count,
            'max_message_display_count': MAX_MESSAGES_PER_BATCH,
            'non_preview_usernames_str': non_preview_usernames_str,
            'base_url': base,
            'base_utm_query': base_utm_query,
            'inbox_url': inbox_url,
        }
        custom_headers = {
            'List-Unsubscribe': "<%s>" % unsubscribe_link,
        }
        g.log.info('sending message for user: %s' % user._fullname)
        g.email_provider.send_email(
            to_address=user.email,
            from_address="Reddit <%s>" % g.notification_email,
            subject=Email.subjects[Email.Kind.MESSAGE_NOTIFICATION],
            text=MessageNotificationEmail(**templateData).render(style='email'),
            html=MessageNotificationEmail(**templateData).render(style='html'),
            custom_headers=custom_headers,
            email_type='message_notification_email',
        )
        total_messages_sent += 1

        # report the email event to data pipeline
        g.events.orangered_email_event(
            request=request,
            context=c,
            user=user,
            messages=messages,
            email_hash=email_hash,
            reply_count=message_count,
            newest_reply_age=newest_inbox_rel._date,
            oldest_reply_age=oldest_inbox_rel._date,
        )

        g.stats.simple_event('email.message_notification.queued')
        user_notification_ratelimit.record_usage()

    timer_end = time.time()
    g.log.info(
        "Took %s seconds to send orangered emails" % (timer_end - timer_start))
    g.log.info("Total number of messages sent: %s" % total_messages_sent)
    g.log.info("Total count of inbox lookups: %s" % inbox_item_lookup_count)
def _connections(cls, meetup, user):
    rowkey = cls._rowkey(meetup, user)
    connections = cls.get_time_sorted_columns(rowkey).keys()
    return Account._byID36(connections, return_dict=False, data=True)
def add_props(cls, user, wrapped):
    from r2.lib.db.thing import Thing
    from r2.lib.menus import QueryButton
    from r2.lib.pages import WrappedUser
    from r2.models import (
        Account,
        Link,
        ModSR,
        MultiReddit,
        Subreddit,
    )

    target_names = {item.target_fullname for item in wrapped
                    if hasattr(item, "target_fullname")}
    targets = Thing._by_fullname(target_names, data=True)

    # get moderators
    moderators = Account._byID36({item.mod_id36 for item in wrapped},
                                 data=True)

    # get authors for targets that are Links or Comments
    target_author_names = {target.author_id for target in targets.values()
                           if hasattr(target, "author_id")}
    target_authors = Account._byID(target_author_names, data=True)

    # get parent links for targets that are Comments
    parent_link_names = {target.link_id for target in targets.values()
                         if hasattr(target, "link_id")}
    parent_links = Link._byID(parent_link_names, data=True)

    # get subreddits
    srs = Subreddit._byID36({item.sr_id36 for item in wrapped}, data=True)

    for item in wrapped:
        item.moderator = moderators[item.mod_id36]
        item.subreddit = srs[item.sr_id36]
        item.text = cls._text.get(item.action, '')
        item.target = None
        item.target_author = None

        if hasattr(item, "target_fullname") and item.target_fullname:
            item.target = targets[item.target_fullname]

            if hasattr(item.target, "author_id"):
                author_name = item.target.author_id
                item.target_author = target_authors[author_name]

            if hasattr(item.target, "link_id"):
                parent_link_name = item.target.link_id
                item.parent_link = parent_links[parent_link_name]

            if isinstance(item.target, Account):
                item.target_author = item.target

    if c.render_style == "html":
        request_path = request.path

        # make wrapped users for targets that are accounts
        user_targets = filter(lambda target: isinstance(target, Account),
                              targets.values())
        wrapped_user_targets = {user._fullname: WrappedUser(user)
                                for user in user_targets}

        for item in wrapped:
            if isinstance(item.target, Account):
                user_name = item.target._fullname
                item.wrapped_user_target = wrapped_user_targets[user_name]

            css_class = 'modactions %s' % item.action
            action_button = QueryButton(
                '', item.action, query_param='type', css_class=css_class)
            action_button.build(base_path=request_path)
            item.action_button = action_button

            mod_button = QueryButton(
                item.moderator.name, item.moderator.name, query_param='mod')
            mod_button.build(base_path=request_path)
            item.mod_button = mod_button

            if isinstance(c.site, ModSR) or isinstance(c.site, MultiReddit):
                rgb = item.subreddit.get_rgb()
                item.bgcolor = 'rgb(%s,%s,%s)' % rgb
                item.is_multi = True
            else:
                item.bgcolor = "rgb(255,255,255)"
                item.is_multi = False
def add_props(cls, user, wrapped):
    from r2.lib.db.thing import Thing
    from r2.lib.menus import QueryButton
    from r2.lib.pages import WrappedUser
    from r2.models import (
        Account,
        Link,
        ModSR,
        MultiReddit,
        Subreddit,
    )

    target_names = {item.target_fullname for item in wrapped
                    if hasattr(item, "target_fullname")}
    targets = Thing._by_fullname(target_names, data=True)

    # get moderators
    moderators = Account._byID36({item.mod_id36 for item in wrapped},
                                 data=True)

    # get authors for targets that are Links or Comments
    target_author_names = {target.author_id for target in targets.values()
                           if hasattr(target, "author_id")}
    target_authors = Account._byID(target_author_names, data=True)

    # get parent links for targets that are Comments
    parent_link_names = {target.link_id for target in targets.values()
                         if hasattr(target, "link_id")}
    parent_links = Link._byID(parent_link_names, data=True)

    # get subreddits
    srs = Subreddit._byID36({item.sr_id36 for item in wrapped}, data=True)

    for item in wrapped:
        item.moderator = moderators[item.mod_id36]
        item.subreddit = srs[item.sr_id36]
        item.text = cls._text.get(item.action, '')
        item.details = item.get_extra_text()
        item.target = None
        item.target_author = None

        if hasattr(item, "target_fullname") and item.target_fullname:
            item.target = targets[item.target_fullname]

            if hasattr(item.target, "author_id"):
                author_name = item.target.author_id
                item.target_author = target_authors[author_name]

            if hasattr(item.target, "link_id"):
                parent_link_name = item.target.link_id
                item.parent_link = parent_links[parent_link_name]

            if isinstance(item.target, Account):
                item.target_author = item.target

    if c.render_style == "html":
        request_path = request.path

        # make wrapped users for targets that are accounts
        user_targets = filter(lambda target: isinstance(target, Account),
                              targets.values())
        wrapped_user_targets = {user._fullname: WrappedUser(user)
                                for user in user_targets}

        for item in wrapped:
            if isinstance(item.target, Account):
                user_name = item.target._fullname
                item.wrapped_user_target = wrapped_user_targets[user_name]

            css_class = 'modactions %s' % item.action
            action_button = QueryButton(
                '', item.action, query_param='type', css_class=css_class)
            action_button.build(base_path=request_path)
            item.action_button = action_button

            mod_button = QueryButton(
                item.moderator.name, item.moderator.name, query_param='mod')
            mod_button.build(base_path=request_path)
            item.mod_button = mod_button

            if isinstance(c.site, ModSR) or isinstance(c.site, MultiReddit):
                item.bgcolor = 'rgb(%s,%s,%s)' % cls.get_rgb(item)
                item.is_multi = True
            else:
                item.bgcolor = "rgb(255,255,255)"
                item.is_multi = False