def set(self, value, groupId=None, siteId=None):
    """Set the value of the option, creating the row if needed.

    :param value: The value to store for the option.
    :param groupId: The group the option applies to (``''``/``None``
        for no group).
    :param siteId: The site the option applies to (``''``/``None``
        for no site).

    An INSERT is tried first; if it fails (typically because the row
    already exists and a uniqueness constraint is violated) the
    transaction is rolled back and an UPDATE of the existing row is
    issued instead.
    """
    ot = self.optionTable
    i = ot.insert()
    # Normalise None to the empty string used as the "no group/site"
    # marker in the table.
    groupId = groupId or ""
    siteId = siteId or ""
    session = getSession()
    try:
        session.begin(subtransactions=True)
        session.execute(i, params={'component_id': self.componentId,
                                   'option_id': self.optionId,
                                   'group_id': groupId,
                                   'site_id': siteId,
                                   'value': value})
        session.commit()
    except SQLAlchemyError:
        # The INSERT failed, so assume the row already exists: roll
        # back, get a fresh session, and update the row in place.
        session.rollback()
        session = getSession()
        session.begin(subtransactions=True)
        u = ot.update(sa.and_(ot.c.component_id == self.componentId,
                              ot.c.option_id == self.optionId,
                              ot.c.group_id == groupId,
                              ot.c.site_id == siteId))
        session.execute(u, params={'value': value})
        session.commit()
def set(self, value, groupId=None, siteId=None):
    """Set the value of the option, creating the row if needed.

    :param value: The value to store for the option.
    :param groupId: The group the option applies to (``''``/``None``
        for no group).
    :param siteId: The site the option applies to (``''``/``None``
        for no site).

    An INSERT is tried first; if it fails (typically because the row
    already exists and a uniqueness constraint is violated) the
    transaction is rolled back and an UPDATE of the existing row is
    issued instead.
    """
    ot = self.optionTable
    i = ot.insert()
    # Normalise None to the empty string used as the "no group/site"
    # marker in the table.
    groupId = groupId or ""
    siteId = siteId or ""
    session = getSession()
    try:
        session.begin(subtransactions=True)
        session.execute(i, params={'component_id': self.componentId,
                                   'option_id': self.optionId,
                                   'group_id': groupId,
                                   'site_id': siteId,
                                   'value': value})
        session.commit()
    except SQLAlchemyError:
        # The INSERT failed, so assume the row already exists: roll
        # back, get a fresh session, and update the row in place.
        session.rollback()
        session = getSession()
        session.begin(subtransactions=True)
        u = ot.update(sa.and_(ot.c.component_id == self.componentId,
                              ot.c.option_id == self.optionId,
                              ot.c.group_id == groupId,
                              ot.c.site_id == siteId))
        session.execute(u, params={'value': value})
        session.commit()
def files_metadata(self, post_id):
    """Retrieve the metadata of all files associated with this post.

    :param post_id: The identifier of the post.
    :returns: A list of dictionaries, one per file, each with the
        keys ``file_id``, ``file_name``, ``date``, ``mime_type`` and
        ``file_size``; an empty list when the post has no files.
    """
    ft = self.fileTable
    statement = ft.select()
    statement.append_whereclause(ft.c.post_id == post_id)
    session = getSession()
    r = session.execute(statement)
    # Iterating an empty result set is harmless, so the old rowcount
    # guard and the duplicate ``out = []`` initialisation are gone;
    # a comprehension replaces the manual append loop.
    return [{'file_id': row['file_id'],
             'file_name': to_unicode(row['file_name']),
             'date': row['date'],
             'mime_type': to_unicode(row['mime_type']),
             'file_size': row['file_size']} for row in r]
def post(self, post_id):
    """Retrieve a particular post.

    :param post_id: The identifier of the post to fetch.
    :returns: The marshalled post dictionary, or ``None`` when no
        post matches ``post_id``.
    """
    pt = self.postTable
    s = pt.select()
    s.append_whereclause(pt.c.post_id == post_id)
    session = getSession()
    result = session.execute(s)
    if not result.rowcount:
        return None
    assert result.rowcount == 1, "Posts should always be unique"
    return self.marshall_post(result.fetchone())
def _nav_topic(self, curr_topic_id, direction):
    """Find the topic adjacent to the current one in its group.

    :param curr_topic_id: Identifier of the current topic.
    :param direction: ``'prev'`` for the previous topic; any other
        value navigates to the next topic.
    :returns: A dict with ``topic_id``, ``last_post_id``, ``subject``
        and ``date``, or ``None`` when there is no adjacent topic.
    """
    # Conditional expressions replace the fragile ``cond and a or b``
    # idiom, which silently misbehaves if the true-branch value is
    # ever falsy.
    op = '<=' if direction == 'prev' else '>='
    dir_ = 'desc' if direction == 'prev' else 'asc'
    # Only the trusted literals above are interpolated into the SQL;
    # the topic identifier is passed as a bound parameter.
    s = sa.text("""select topic.last_post_date as date, topic.topic_id,
        topic.last_post_id, topic.original_subject as subject
        from topic,
        (select topic_id,last_post_date as date,group_id,site_id
         from topic where topic_id=:curr_topic_id) as curr_topic
        where topic.group_id=curr_topic.group_id
        and topic.site_id=curr_topic.site_id
        and topic.last_post_date %s curr_topic.date
        and topic.topic_id != curr_topic.topic_id
        order by date %s limit 1""" % (op, dir_))
    session = getSession()
    r = session.execute(
        s, params={'curr_topic_id': curr_topic_id}).fetchone()
    if r:
        return {'topic_id': r['topic_id'],
                'last_post_id': r['last_post_id'],
                'subject': to_unicode(r['subject']),
                'date': r['date']}
    return None
def _nav_post(self, curr_post_id, direction, topic_id=None):
    """Find the post adjacent to the current one.

    :param curr_post_id: Identifier of the current post.
    :param direction: ``'prev'`` for the previous post; any other
        value navigates to the next post.
    :param topic_id: When set, restrict navigation to the current
        post's topic.
    :returns: A dict with ``post_id``, ``topic_id``, ``subject``,
        ``date``, ``author_id`` and ``has_attachments``, or ``None``
        when there is no adjacent post.
    """
    # Conditional expressions replace the fragile ``cond and a or b``
    # idiom, which silently misbehaves if the true-branch value is
    # ever falsy.
    op = '<=' if direction == 'prev' else '>='
    navDir = 'desc' if direction == 'prev' else 'asc'
    topic_id_filter = ''
    if topic_id:
        topic_id_filter = 'post.topic_id=curr_post.topic_id and'
    # Only the trusted literals above are interpolated into the SQL;
    # the post identifier is passed as a bound parameter.
    s = sa.text("""select post.date, post.post_id, post.topic_id,
        post.subject, post.user_id, post.has_attachments
        from post,
        (select date,group_id,site_id,post_id,topic_id
         from post where post_id=:curr_post_id) as curr_post
        where post.group_id=curr_post.group_id
        and post.site_id=curr_post.site_id
        and post.date %s curr_post.date
        and %s post.post_id != curr_post.post_id
        order by post.date %s limit 1""" % (op, topic_id_filter, navDir))
    session = getSession()
    d = {'curr_post_id': curr_post_id}
    r = session.execute(s, params=d).fetchone()
    if r:
        return {'post_id': r['post_id'],
                'topic_id': r['topic_id'],
                'subject': to_unicode(r['subject']),
                'date': r['date'],
                'author_id': r['user_id'],
                'has_attachments': r['has_attachments']}
    return None
def previousBounceDates(self, email):
    """Find the distinct days on which this address bounced.

    Checks for bounces from this email address in the past
    LAST_NUM_DAYS, or since the address was last disabled, whichever
    window is shorter.

    :param email: The email address to check.
    :returns: ``(bounces, daysChecked)`` where ``bounces`` is a list
        of distinct ``YYYYMMDD`` strings, most recent first, and
        ``daysChecked`` is the number of days the window covers.
    """
    now = datetime.datetime.now(UTC)
    dateToCheck = now - datetime.timedelta(LAST_NUM_DAYS)
    lastDisabledDate = self.lastDisabledDate(email)
    if lastDisabledDate:
        # Stored dates are naive; treat them as UTC.
        lastDisabledDate = lastDisabledDate.replace(tzinfo=UTC)
        dateToCheck = max(dateToCheck, lastDisabledDate)
    daysChecked = (now.date() - dateToCheck.date()).days
    bt = self.bounceTable
    s = bt.select(order_by=sa.desc(bt.c.date))
    s.append_whereclause(bt.c.email == email)
    s.append_whereclause(bt.c.date > dateToCheck)
    session = getSession()
    bounces = []
    for row in session.execute(s):
        day = row['date'].strftime("%Y%m%d")
        if day not in bounces:
            bounces.append(day)
    return (bounces, daysChecked)
def bounce_events(self, email):
    """Return the audit events recorded for bounces from ``email``,
    most recent first.
    """
    aet = self.auditEventTable
    # SELECT * FROM bounce_audit
    #   WHERE subsystem = SUBSYSTEM AND instance_datum = email
    #   ORDER BY event_date DESC;
    s = aet.select(order_by=sa.desc(aet.c.event_date))
    s.append_whereclause(aet.c.subsystem == SUBSYSTEM)
    s.append_whereclause(aet.c.instance_datum == email)
    session = getSession()
    rows = session.execute(s)
    return [{'event_id': row['id'],
             'date': row['event_date'],
             'subsystem': row['subsystem'],
             'code': row['event_code'],
             'user_id': row['user_id'],
             'instance_user_id': row['instance_user_id'],
             'site_id': row['site_id'],
             'group_id': row['group_id'],
             'instanceDatum': row['instance_datum'],
             'supplementaryDatum': row['supplementary_datum']}
            for row in rows]
def files_metadata_topic(self, topic_ids):
    """Return metadata for every file attached to any of the topics
    in ``topic_ids``, ordered by file date.
    """
    ft = self.fileTable
    pt = self.postTable
    columns = [pt.c.site_id, pt.c.group_id, pt.c.topic_id,
               pt.c.user_id, ft.c.post_id, ft.c.file_id,
               ft.c.mime_type, ft.c.file_name, ft.c.file_size,
               ft.c.date]
    s = sa.select(columns, ft.c.topic_id.in_(topic_ids),
                  order_by=ft.c.date)
    # Join each file row to its post.
    s.append_whereclause(ft.c.post_id == pt.c.post_id)
    session = getSession()
    result = session.execute(s)
    return [{'site_id': row['site_id'],
             'group_id': row['group_id'],
             'topic_id': row['topic_id'],
             'user_id': row['user_id'],
             'post_id': row['post_id'],
             'file_id': row['file_id'],
             'file_size': row['file_size'],
             'mime_type': row['mime_type'],
             'file_name': row['file_name'],
             'date': row['date'], } for row in result]
def posting_authors(self, siteId, groupId, limit=5):
    '''Get the most recently posting authors.

    :param siteId: Site identifier.
    :param groupId: Group identifier.
    :param limit: Maximum number of authors returned (default 5).
    :returns: A list of user identifiers, most recent poster first.
    '''
    # SELECT user_id, MAX(date) AS max_date
    #   FROM post
    #   WHERE group_id = :groupId AND site_id = :siteId
    #     AND hidden IS NULL
    #   GROUP BY user_id ORDER BY max_date DESC LIMIT :limit;
    pt = self.postTable
    cols = [pt.c.user_id, sa.func.max(pt.c.date).label('max_date')]
    s = sa.select(cols, group_by=pt.c.user_id,
                  order_by=(sa.desc('max_date')), limit=limit)
    s.append_whereclause(pt.c.group_id == groupId)
    s.append_whereclause(pt.c.site_id == siteId)
    s.append_whereclause(pt.c.hidden == None)  # lint:ok
    session = getSession()
    r = session.execute(s)
    # A comprehension replaces the manual append loop; the result is
    # always a list, so the old type-assert was redundant.
    return [x['user_id'] for x in r]
def set_groupEmailSetting(self, setting):
    """Set the message-delivery setting for the user in the group.

    :param setting: The delivery setting; must be one of
        ``self.possible_settings``.
    :raises ValueError: If ``setting`` is not a known setting.
    """
    if setting not in self.possible_settings:
        raise ValueError("Unknown setting %s" % setting)
    est = self.emailSettingTable
    if self.get_groupEmailSetting():
        # A row exists: update it. Rows may have been written with an
        # empty site_id, so match either the current site or ''.
        statement = est.update(
            sa.and_(est.c.user_id == self.userId,
                    sa.or_(est.c.site_id == self.siteId,
                           est.c.site_id == ''),
                    est.c.group_id == self.groupId))
        params = {'setting': setting, }
    else:
        # No row yet: insert one.
        statement = est.insert()
        params = {'user_id': self.userId, 'site_id': self.siteId,
                  'group_id': self.groupId, 'setting': setting}
    session = getSession()
    session.execute(statement, params=params)
    mark_changed(session)
def search(self, searchTokens, siteId, groupId, limit=12, offset=0):
    """Search the topics of a group for keywords.

    :param searchTokens: Tokenised search, with a ``keywords``
        attribute.
    :returns: A list of marshalled topic-info dictionaries, most
        recently active topic first.
    """
    # TODO Look at <https://sqlalchemy-searchable.readthedocs.org/>
    tt = self.topicTable
    tkt = self.topicKeywordsTable
    s = sa.select(self.cols, limit=limit, offset=offset,
                  order_by=sa.desc(tt.c.last_post_date))
    self.add_standard_where_clauses(s, siteId, groupId, False)
    # Join topics to their keyword rows.
    s.append_whereclause(tt.c.topic_id == tkt.c.topic_id)
    if searchTokens.keywords:
        # Full-text search: topic.fts_vectors @@
        #   to_tsquery(kw1 & kw2 & ... & kwn)
        query = ' & '.join(searchTokens.keywords)
        s.append_whereclause(tt.c.fts_vectors.match(query))
    session = getSession()
    return [self.marshal_topic_info(row)
            for row in session.execute(s)]
def get_digest_addresses(self, site_id, group_id, id_getter):
    """Return the lower-cased addresses that should receive the
    digest for a group.

    :param site_id: Site identifier (currently ignored; see the
        FIXME below).
    :param group_id: Group identifier.
    :param id_getter: Callable that returns the group-member user
        IDs when called with ``ids_only=True``.

    Three passes: find the members with the digest setting; use any
    group-specific delivery addresses for those members; fall back to
    the verified preferred address for the rest. Finally the
    blacklist is applied.
    """
    # TODO: We currently can't use site_id
    site_id = ''
    user_ids = id_getter(ids_only=True)
    est = self.emailSettingTable
    uet = self.userEmailTable
    guet = self.groupUserEmailTable
    email_settings = est.select()
    # FIXME: The user-group-email-settings were historically recorded
    #        without a site identifier, relying on the
    #        group-identifiers to be unique. We need to fix this. Sadly
    #        this will require a lot of work to test. Just adding a
    #        site identifier check here will cause the digests to not
    #        go out.
    # email_settings.append_whereclause(est.c.site_id == site_id)
    email_settings.append_whereclause(est.c.group_id == group_id)
    email_settings.append_whereclause(est.c.setting == 'digest')
    session = getSession()
    r = session.execute(email_settings)
    digest_ids = []
    ignore_ids = []
    email_addresses = []
    if r.rowcount:
        # Pass 1: collect the group members who asked for a digest,
        # skipping duplicates.
        for row in r:
            if ((row['user_id'] in user_ids)
                    and (row['user_id'] not in digest_ids)):
                digest_ids.append(row['user_id'])
    # Pass 2: members with a group-specific delivery address get the
    # digest there, and are excluded from the preferred-address pass.
    email_group = guet.select()
    email_group.append_whereclause(guet.c.site_id == site_id)
    email_group.append_whereclause(guet.c.group_id == group_id)
    email_group.append_whereclause(guet.c.user_id.in_(digest_ids))
    r = session.execute(email_group)
    if r.rowcount:
        for row in r:
            ignore_ids.append(row['user_id'])
            email_addresses.append(row['email'].lower())
    # remove any ids we have already processed
    digest_ids = [x for x in digest_ids if x not in ignore_ids]
    # Pass 3: everyone else gets the digest at their verified
    # preferred address.
    email_user = uet.select()
    #lint:disable
    email_user.append_whereclause(uet.c.is_preferred == True)
    email_user.append_whereclause(uet.c.user_id.in_(digest_ids))
    email_user.append_whereclause(uet.c.verified_date != None)
    #lint:enable
    r = session.execute(email_user)
    if r.rowcount:
        for row in r:
            if row['user_id'] in user_ids:
                email_addresses.append(row['email'].lower())
    email_addresses = self.process_blacklist(email_addresses)
    return email_addresses
def files_metadata_topic(self, topic_ids):
    """Return metadata for every file attached to any of the topics
    in ``topic_ids``, ordered by file date.
    """
    ft = self.fileTable
    pt = self.postTable
    columns = [pt.c.site_id, pt.c.group_id, pt.c.topic_id,
               pt.c.user_id, ft.c.post_id, ft.c.file_id,
               ft.c.mime_type, ft.c.file_name, ft.c.file_size,
               ft.c.date]
    s = sa.select(columns, ft.c.topic_id.in_(topic_ids),
                  order_by=ft.c.date)
    # Join each file row to its post.
    s.append_whereclause(ft.c.post_id == pt.c.post_id)
    session = getSession()
    result = session.execute(s)
    return [{'site_id': row['site_id'],
             'group_id': row['group_id'],
             'topic_id': row['topic_id'],
             'user_id': row['user_id'],
             'post_id': row['post_id'],
             'file_id': row['file_id'],
             'file_size': row['file_size'],
             'mime_type': row['mime_type'],
             'file_name': row['file_name'],
             'date': row['date'], } for row in result]
def no_digest_but_active(self, interval='7 days',
                         active_interval='3 months'):
    """List the groups that are active but lack a recent digest.

    :param interval: SQL interval: how far back to look for a digest.
    :param active_interval: SQL interval: how recent a post must be
        for the group to count as active.
    :returns: A list of dicts containing ``site_id`` and
        ``group_id`` for each such group.
    """
    s = sa.text("""SELECT DISTINCT topic.site_id, topic.group_id
      FROM (SELECT site_id, group_id, max(sent_date) AS sent_date
            FROM group_digest
            GROUP BY site_id,group_id) AS latest_digest, topic
      WHERE topic.site_id = latest_digest.site_id
        AND topic.group_id = latest_digest.group_id
        AND latest_digest.sent_date <
            CURRENT_TIMESTAMP-interval :interval
        AND topic.last_post_date >
            CURRENT_TIMESTAMP-interval :active_interval""")
    session = getSession()
    params = {'interval': interval, 'active_interval': active_interval}
    result = session.execute(s, params=params)
    return [{'site_id': row['site_id'], 'group_id': row['group_id']}
            for row in result]
def post_id_from_legacy_id(self, legacy_post_id):
    """Map a legacy (pre-1.0) GS post identifier to the current one.

    Primarily used for backwards compatibility in the redirection
    system.

    :param legacy_post_id: The old post identifier.
    :returns: The new post identifier, or ``None`` when it is
        unknown (including when there is no mapping table at all).
    """
    pit = self.post_id_mapTable
    if pit is None:
        return None
    s = pit.select()
    s.append_whereclause(pit.c.old_post_id == legacy_post_id)
    session = getSession()
    r = session.execute(s)
    if not r.rowcount:
        return None
    return r.fetchone()['new_post_id']
def previousBounceDates(self, email):
    """Find the distinct days on which this address bounced.

    Checks for bounces from this email address in the past
    LAST_NUM_DAYS, or since the address was last disabled, whichever
    window is shorter.

    :param email: The email address to check.
    :returns: ``(bounces, daysChecked)`` where ``bounces`` is a list
        of distinct ``YYYYMMDD`` strings, most recent first, and
        ``daysChecked`` is the number of days the window covers.
    """
    now = datetime.datetime.now(UTC)
    dateToCheck = now - datetime.timedelta(LAST_NUM_DAYS)
    lastDisabledDate = self.lastDisabledDate(email)
    if lastDisabledDate:
        # Stored dates are naive; treat them as UTC.
        lastDisabledDate = lastDisabledDate.replace(tzinfo=UTC)
        dateToCheck = max(dateToCheck, lastDisabledDate)
    daysChecked = (now.date() - dateToCheck.date()).days
    bt = self.bounceTable
    s = bt.select(order_by=sa.desc(bt.c.date))
    s.append_whereclause(bt.c.email == email)
    s.append_whereclause(bt.c.date > dateToCheck)
    session = getSession()
    bounces = []
    for row in session.execute(s):
        day = row['date'].strftime("%Y%m%d")
        if day not in bounces:
            bounces.append(day)
    return (bounces, daysChecked)
def _nav_topic(self, curr_topic_id, direction):
    """Find the topic adjacent to the current one in its group.

    :param curr_topic_id: Identifier of the current topic.
    :param direction: ``'prev'`` for the previous topic; any other
        value navigates to the next topic.
    :returns: A dict with ``topic_id``, ``last_post_id``, ``subject``
        and ``date``, or ``None`` when there is no adjacent topic.
    """
    # Conditional expressions replace the fragile ``cond and a or b``
    # idiom, which silently misbehaves if the true-branch value is
    # ever falsy.
    op = '<=' if direction == 'prev' else '>='
    dir_ = 'desc' if direction == 'prev' else 'asc'
    # Only the trusted literals above are interpolated into the SQL;
    # the topic identifier is passed as a bound parameter.
    s = sa.text("""select topic.last_post_date as date, topic.topic_id,
        topic.last_post_id, topic.original_subject as subject
        from topic,
        (select topic_id,last_post_date as date,group_id,site_id
         from topic where topic_id=:curr_topic_id) as curr_topic
        where topic.group_id=curr_topic.group_id
        and topic.site_id=curr_topic.site_id
        and topic.last_post_date %s curr_topic.date
        and topic.topic_id != curr_topic.topic_id
        order by date %s limit 1""" % (op, dir_))
    session = getSession()
    r = session.execute(
        s, params={'curr_topic_id': curr_topic_id}).fetchone()
    if r:
        return {'topic_id': r['topic_id'],
                'last_post_id': r['last_post_id'],
                'subject': to_unicode(r['subject']),
                'date': r['date']}
    return None
def files_metadata(self, postId):
    """Retrieve the metadata of all files associated with a post.

    :param str postId: The identifier of a post
    :returns: The files for the post, or an empty list (``[]``)
    :rtype: list

    The dictionary representing each file contains the following

    ================== ======== =============================
    Key                Type     Note
    ================== ======== =============================
    ``file_id``        str      File identifier
    ``file_name``      Unicode  File name
    ``date``           DateTime The date the file was created
    ``mime_type``      Unicode  The MIME type of the file
    ``file_size``      int      The size of the file in bytes
    ================== ======== =============================
    """
    ft = self.fileTable
    s = ft.select()
    s.append_whereclause(ft.c.post_id == postId)
    session = getSession()
    result = session.execute(s)
    return [{'file_id': row['file_id'],
             'file_name': to_unicode_or_bust(row['file_name']),
             'date': row['date'],
             'mime_type': to_unicode_or_bust(row['mime_type']),
             'file_size': row['file_size'], } for row in result]
def active_groups(self, interval='1 day'):
    """Retrieve all active groups.

    An active group is one which has had a post added to it within
    ``interval``.

    :param interval: An SQL interval, as a string, made up of
        "quantity unit". The quantity is an integer value, while the
        unit is one of "second", "minute", "hour", "day", "week",
        "month", "year", "decade", "century", or "millennium". See
        section 8.5.1.4 of the PostgreSQL manual.
    :returns: A list of dictionaries, which contain ``group_id``
        and ``site_id``.
    """
    s = sa.text("""SELECT DISTINCT group_id, site_id FROM topic
        WHERE age(CURRENT_TIMESTAMP, last_post_date)
              < INTERVAL :interval""")
    session = getSession()
    result = session.execute(s, params={'interval': interval})
    return [{'site_id': row['site_id'], 'group_id': row['group_id']}
            for row in result]
def files_metadata(self, post_id):
    """Retrieve the metadata of all files associated with this post.

    :param post_id: The identifier of the post.
    :returns: A list of dictionaries, one per file, each with the
        keys ``file_id``, ``file_name``, ``date``, ``mime_type`` and
        ``file_size``; an empty list when the post has no files.
    """
    ft = self.fileTable
    statement = ft.select()
    statement.append_whereclause(ft.c.post_id == post_id)
    session = getSession()
    r = session.execute(statement)
    # Iterating an empty result set is harmless, so the old rowcount
    # guard and the duplicate ``out = []`` initialisation are gone;
    # a comprehension replaces the manual append loop.
    return [{'file_id': row['file_id'],
             'file_name': to_unicode(row['file_name']),
             'date': row['date'],
             'mime_type': to_unicode(row['mime_type']),
             'file_size': row['file_size']} for row in r]
def add_invitation(self, invitiationId, siteId, groupId, userId,
                   invtUsrId, initialInvite=False):
    """Record an invitation for a user to join a group.

    :param invitiationId: The invitation identifier (the misspelling
        is kept because it is part of the public signature).
    :param siteId: The site the group belongs to.
    :param groupId: The group being joined.
    :param userId: The user being invited.
    :param invtUsrId: The user issuing the invitation.
    :param initialInvite: Whether this is the first invitation sent
        to the user.
    """
    assert invitiationId, 'invitiationId is %s' % invitiationId
    assert siteId, 'siteId is %s' % siteId
    assert groupId, 'groupId is %s' % groupId
    assert userId, 'userId is %s' % userId
    assert invtUsrId, 'invtUsrId is %s' % invtUsrId
    now = datetime.utcnow().replace(tzinfo=pytz.utc)
    session = getSession()
    session.execute(self.userInvitationTable.insert(),
                    params={'invitation_id': invitiationId,
                            'site_id': siteId,
                            'group_id': groupId,
                            'user_id': userId,
                            'inviting_user_id': invtUsrId,
                            'invitation_date': now,
                            'initial_invite': initialInvite})
    mark_changed(session)
def has_digest_since(self, site_id, group_id,
                     interval=datetime.timedelta(0.9)):
    """Have there been any digests sent in the last ``interval``
    time period? (Default 0.9 days, i.e. 21.6 hours.)
    """
    sincetime = self.now - interval
    dt = self.digestTable
    s = dt.select()
    s.append_whereclause(dt.c.site_id == site_id)
    s.append_whereclause(dt.c.group_id == group_id)
    s.append_whereclause(dt.c.sent_date >= sincetime)
    session = getSession()
    r = session.execute(s)
    return bool(r.rowcount)
def clear_nicknames(self):
    """Delete every nickname recorded for this user."""
    nicknames = self.nicknameTable
    d = nicknames.delete(nicknames.c.user_id == self.user_id)
    session = getSession()
    session.execute(d)
    mark_changed(session)
def remove_address(self, address):
    """Delete an email address, matching it case-insensitively.

    :param address: The email address to remove.
    """
    uet = self.userEmailTable
    d = uet.delete(sa.func.lower(uet.c.email) == address.lower())
    session = getSession()
    session.execute(d)
    mark_changed(session)
def has_skip(self, userId):
    """Does ``userId`` have a skip-record?"""
    st = self.skipTable
    s = st.select()
    s.append_whereclause(st.c.user_id == userId)
    session = getSession()
    return bool(session.execute(s).rowcount)
def recent_files(self, siteId, groupId, limit=5):
    """Return metadata for the files most recently posted to a group.

    :param siteId: Site identifier.
    :param groupId: Group identifier.
    :param limit: Maximum number of files returned (default 5).
    :returns: A list of dictionaries, newest post first.
    """
    pt = self.postTable
    ft = self.fileTable
    tt = self.topicTable
    columns = [pt.c.user_id, pt.c.post_id, ft.c.file_id,
               ft.c.mime_type, ft.c.file_name, ft.c.file_size,
               ft.c.date, tt.c.original_subject,
               pt.c.date.label('post_date')]
    s = sa.select(columns, limit=limit, order_by=(sa.desc('post_date')))
    s.append_whereclause(pt.c.group_id == groupId)
    s.append_whereclause(pt.c.site_id == siteId)
    s.append_whereclause(pt.c.hidden == None)  # lint:ok
    # Join posts to their files and topics.
    s.append_whereclause(pt.c.post_id == ft.c.post_id)
    s.append_whereclause(pt.c.topic_id == tt.c.topic_id)
    session = getSession()
    result = session.execute(s)
    return [{'user_id': row['user_id'],
             'file_id': row['file_id'],
             'mime_type': row['mime_type'],
             'name': row['file_name'],
             'size': row['file_size'],
             'date': row['date'],
             'post_date': row['post_date'],
             'post_id': row['post_id'],
             'subject': row['original_subject']} for row in result]
def post_search_keyword(self, searchTokens, site_id, group_ids=None,
                        author_ids=None, limit=12, offset=0):
    """Search the posts of a site for keywords.

    :param searchTokens: Tokenised search terms.
    :param site_id: Site to search.
    :param group_ids: Groups to restrict the search to (default: no
        restriction).
    :param author_ids: Authors to restrict the search to (default:
        no restriction).
    :param limit: Maximum number of posts returned.
    :param offset: Offset into the result set, for paging.
    :returns: A list of post dictionaries, newest first.
    """
    if group_ids is None:
        group_ids = []
    if author_ids is None:
        author_ids = []
    pt = self.postTable
    cols = [pt.c.post_id, pt.c.user_id, pt.c.group_id, pt.c.subject,
            pt.c.date, pt.c.body, pt.c.has_attachments]
    statement = sa.select(cols, limit=limit, offset=offset,
                          order_by=sa.desc(pt.c.date))
    self.add_standard_where_clauses(statement, pt, site_id, group_ids,
                                    False)
    statement = self.__add_author_where_clauses(statement, author_ids)
    statement = self.__add_post_keyword_search_where_clauses(
        statement, searchTokens)
    session = getSession()
    retval = []
    for x in session.execute(statement):
        # A conditional expression replaces the fragile
        # ``cond and a or b`` idiom for the attachment metadata.
        metadata = (self.files_metadata(x['post_id'])
                    if x['has_attachments'] else [])
        retval.append({'post_id': x['post_id'],
                       'user_id': x['user_id'],
                       'group_id': x['group_id'],
                       'subject': x['subject'],
                       'date': x['date'],
                       'body': x['body'],
                       'files_metadata': metadata, })
    return retval
def set_groupEmailSetting(self, site_id, group_id, setting):
    """ Given a site_id, group_id and a setting, set the
        email_setting table.

    :param site_id: Site identifier.
    :param group_id: Group identifier.
    :param setting: One of ``possible_settings``.
    """
    assert setting in possible_settings, "Unknown setting %s" % setting
    est = self.emailSettingTable
    and_ = sa.and_
    curr_setting = self.get_groupEmailSetting(site_id, group_id)
    if not curr_setting:
        # No existing row: insert one for this user.
        iOrU = est.insert()
        d = {'user_id': self.user_id,
             'site_id': site_id,
             'group_id': group_id,
             'setting': setting}
    else:
        # A row exists: update it in place.
        # NOTE(review): the update matches on
        # ``self.context.getUserName()`` while the insert above uses
        # ``self.user_id`` — confirm these always hold the same
        # value, otherwise updates may miss the inserted row.
        iOrU = est.update(
            and_(est.c.user_id == self.context.getUserName(),
                 est.c.site_id == site_id,
                 est.c.group_id == group_id))
        d = {'setting': setting, }
    session = getSession()
    session.execute(iOrU, params=d)
    mark_changed(session)
def _nav_post(self, curr_post_id, direction, topic_id=None):
    """Find the post adjacent to the current one.

    :param curr_post_id: Identifier of the current post.
    :param direction: ``'prev'`` for the previous post; any other
        value navigates to the next post.
    :param topic_id: When set, restrict navigation to the current
        post's topic.
    :returns: A dict with ``post_id``, ``topic_id``, ``subject``,
        ``date``, ``author_id`` and ``has_attachments``, or ``None``
        when there is no adjacent post.
    """
    # Conditional expressions replace the fragile ``cond and a or b``
    # idiom, which silently misbehaves if the true-branch value is
    # ever falsy.
    op = '<=' if direction == 'prev' else '>='
    navDir = 'desc' if direction == 'prev' else 'asc'
    topic_id_filter = ''
    if topic_id:
        topic_id_filter = 'post.topic_id=curr_post.topic_id and'
    # Only the trusted literals above are interpolated into the SQL;
    # the post identifier is passed as a bound parameter.
    s = sa.text("""select post.date, post.post_id, post.topic_id,
        post.subject, post.user_id, post.has_attachments
        from post,
        (select date,group_id,site_id,post_id,topic_id
         from post where post_id=:curr_post_id) as curr_post
        where post.group_id=curr_post.group_id
        and post.site_id=curr_post.site_id
        and post.date %s curr_post.date
        and %s post.post_id != curr_post.post_id
        order by post.date %s limit 1""" % (op, topic_id_filter, navDir))
    session = getSession()
    d = {'curr_post_id': curr_post_id}
    r = session.execute(s, params=d).fetchone()
    if r:
        return {'post_id': r['post_id'],
                'topic_id': r['topic_id'],
                'subject': to_unicode(r['subject']),
                'date': r['date'],
                'author_id': r['user_id'],
                'has_attachments': r['has_attachments']}
    return None
def set_reset_id(self, resetId):
    """Record a new password-reset identifier for the user."""
    prt = self.passwordResetTable
    params = {'verification_id': resetId,
              'user_id': self.userInfo.id}
    session = getSession()
    session.execute(prt.insert(), params=params)
    mark_changed(session)
def unverify_address(self):
    """Mark the current email address as unverified."""
    uet = self.userEmailTable
    # Address matching is case-insensitive.
    u = uet.update(sa.func.lower(uet.c.email) == self.email.lower())
    session = getSession()
    session.execute(u, params={'verified_date': None})
    mark_changed(session)
def skip_people(self):
    """Return the user IDs of everyone with a skip-record."""
    session = getSession()
    rows = session.execute(self.skipTable.select())
    return [row['user_id'] for row in rows]
def clear_preferredEmail(self):
    """Mark every address of this user as not preferred."""
    uet = self.userEmailTable
    u = uet.update(uet.c.user_id == self.user_id)
    session = getSession()
    session.execute(u, params={'is_preferred': False})
    mark_changed(session)
def clear_reset_ids(self):
    """Mark all outstanding password-reset IDs for the user as used."""
    prt = self.passwordResetTable
    # Only touch rows that have not been reset yet.
    u = prt.update(sa.and_(prt.c.user_id == self.userInfo.id,
                           prt.c.reset == None))  # lint:ok
    now = datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
    session = getSession()
    session.execute(u, params={'reset': now})
    mark_changed(session)
def topics_sinse_yesterday(self, siteId, groupId):
    """Return the topics in a group that have been posted to in the
    last day, with post counts and author information.

    :param siteId: Site identifier.
    :param groupId: Group identifier.
    :returns: A list of topic dictionaries, most recently-active
        topic first.

    (The "sinse" spelling is kept: it is part of the public name.)
    """
    tt = self.topicTable
    tkt = self.topicKeywordsTable
    pt = self.postTable
    # NOTE(review): naive local time is used here, while other
    # queries in this module use UTC — confirm this is intended.
    yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
    #SELECT topic.topic_id, topic.original_subject, topic.last_post_id,
    #       topic.last_post_date, topic.num_posts,
    cols = (tt.c.topic_id, tt.c.site_id, tt.c.group_id,
            tt.c.original_subject, tt.c.first_post_id,
            tt.c.last_post_id, tt.c.num_posts, tt.c.last_post_date,
            tkt.c.keywords,
            # (SELECT COUNT(*)
            #    FROM post
            #    WHERE (post.topic_id = topic.topic_id)
            #      AND post.date >= timestamp 'yesterday')
            #  AS num_posts_day
            sa.select([sa.func.count(pt.c.post_id)],
                      sa.and_(pt.c.date >= yesterday,
                              pt.c.topic_id == tt.c.topic_id)
                      ).as_scalar().label('num_posts_day'),
            # (SELECT post.user_id
            #    FROM post
            #    WHERE post.post_id = topic.last_post_id)
            #  AS last_author_id
            sa.select([pt.c.user_id],
                      pt.c.post_id == tt.c.last_post_id
                      ).as_scalar().label('last_author_id'))
    s = sa.select(cols, order_by=sa.desc(tt.c.last_post_date))
    # FROM topic
    # WHERE topic.site_id = 'main'
    #   AND topic.group_id = 'mpls'
    s.append_whereclause(tt.c.site_id == siteId)
    s.append_whereclause(tt.c.group_id == groupId)
    # AND topic.last_post_date >= timestamp 'yesterday'
    s.append_whereclause(tt.c.last_post_date >= yesterday)
    # Join each topic to its keyword row.
    s.append_whereclause(tt.c.topic_id == tkt.c.topic_id)
    session = getSession()
    r = session.execute(s)
    retval = [{'topic_id': x['topic_id'],
               'subject': x['original_subject'],
               'keywords': x['keywords'],
               'first_post_id': x['first_post_id'],
               'last_post_id': x['last_post_id'],
               'last_post_date': x['last_post_date'],
               'last_author_id': x['last_author_id'],
               'num_posts': x['num_posts'],
               'num_posts_day': x['num_posts_day'], } for x in r]
    return retval
def topic_sticky(self, topicId):
    """Is the topic marked as sticky?"""
    tt = self.topicTable
    s = sa.select([tt.c.sticky])
    s.append_whereclause(tt.c.topic_id == topicId)
    session = getSession()
    row = session.execute(s).fetchone()
    return bool(row['sticky'])
def blacklist(self):
    """Return the blacklisted email addresses, normalised to
    lower-case with surrounding whitespace stripped.
    """
    session = getSession()
    rows = session.execute(self.emailBlacklist.select())
    return [row['email'].strip().lower() for row in rows]
def add_request(self, requestId, userId, message, siteId, groupId):
    """Record a request by a user to join a group."""
    params = {"request_id": requestId,
              "user_id": userId,
              "message": message,
              "site_id": siteId,
              "group_id": groupId,
              "request_date": datetime.now(UTC)}
    session = getSession()
    session.execute(self.requestTable.insert(), params=params)
    mark_changed(session)
def group_join_leave_events(self, group_id):
    """Return the join and leave events for a group, organised by
    year and month.

    :param group_id: The group to report on.
    :returns: ``{year: {month: {JOIN_SUBSYSTEM: [...],
        LEAVE_SUBSYSTEM: [...]}}}`` where each event is a dict with
        ``date``, ``user_id`` (the member the event concerns) and
        ``admin_id`` (the user who performed the action).
    """
    aet = self.auditEventTable
    # SELECT EXTRACT(year FROM event_date) AS year,
    #        EXTRACT(month FROM event_date) AS month,
    #        subsystem, event_date, instance_user_id, user_id
    #   FROM audit_event
    #   WHERE ((subsystem = 'gs.group.member.join'
    #           AND event_code = '1')
    #          OR (subsystem = 'gs.group.member.leave'
    #              AND event_code = '1'))
    #     AND group_id = 'example_group';
    s = sa.select([sa.extract('year', aet.c.event_date).label('year'),
                   sa.extract('month', aet.c.event_date).label('month'),
                   aet.c.subsystem, aet.c.event_date,
                   aet.c.instance_user_id, aet.c.user_id])
    isJoin = ((aet.c.subsystem == JOIN_SUBSYSTEM)
              & (aet.c.event_code == JOIN))
    isLeave = ((aet.c.subsystem == LEAVE_SUBSYSTEM)
               & (aet.c.event_code == LEAVE))
    s.append_whereclause(isJoin | isLeave)
    s.append_whereclause(aet.c.group_id == group_id)
    session = getSession()
    retval = {}
    # Create the year and month buckets on demand, then file each
    # event under its subsystem.
    for row in session.execute(s):
        year = int(row['year'])
        month = int(row['month'])
        months = retval.setdefault(year, {})
        bucket = months.setdefault(month, {JOIN_SUBSYSTEM: [],
                                           LEAVE_SUBSYSTEM: []})
        bucket[row['subsystem']].append(
            {'date': row['event_date'],
             'user_id': row['instance_user_id'],
             'admin_id': row['user_id']})
    return retval
def set_verification_id(self, verificationId):
    """Record a verification ID for the current email address."""
    assert verificationId, 'No verificationId'
    evt = self.emailVerifyTable
    session = getSession()
    session.execute(evt.insert(),
                    params={'verification_id': verificationId,
                            'email': self.email})
    mark_changed(session)
def update_delivery(self, address, isPreferred):
    """Set whether ``address`` is a preferred address of the user.

    :param address: The email address (matched case-insensitively).
    :param isPreferred: The new preferred-flag value.
    """
    uet = self.userEmailTable
    u = uet.update(
        sa.and_(uet.c.user_id == self.userId,
                sa.func.lower(uet.c.email) == address.lower()))
    session = getSession()
    session.execute(u, params={'is_preferred': isPreferred, })
    mark_changed(session)
def update_hidden_post_table(self, postId, dt, userId, reason):
    """Record that a post has been hidden, when, by whom, and why."""
    params = {'post_id': postId,
              'date_hidden': dt,
              'hiding_user': userId,
              'reason': reason}
    session = getSession()
    session.execute(self.hiddenPostTable.insert(), params=params)
    mark_changed(session)
def set_sticky(self, topicId, sticky):
    """Mark a topic as sticky, or clear the mark.

    :param topicId: The topic to modify.
    :param sticky: Truthy to make the topic sticky; falsy to clear.
    """
    tt = self.topicTable
    u = tt.update(tt.c.topic_id == topicId)
    # The column stores the time the topic was made sticky, or NULL
    # when it is not sticky.
    value = datetime.utcnow() if sticky else None
    session = getSession()
    session.execute(u, params={'sticky': value})
    mark_changed(session)
def get_email_from_verificationId(self, verificationId):
    """Return the email address for a verification ID.

    :param verificationId: The verification identifier to look up.
    :returns: The matching email address, or the empty string when
        the identifier is unknown.
    """
    evt = self.emailVerifyTable
    s = sa.select([evt.c.email], limit=1)
    s.append_whereclause(evt.c.verification_id == verificationId)
    session = getSession()
    r = session.execute(s).fetchone()
    # A conditional expression replaces the fragile ``cond and a or
    # b`` idiom; the trailing ``or ''`` preserves the old behaviour
    # of returning '' for a NULL email column.
    retval = (r['email'] or '') if r else ''
    assert type(retval) in (str, unicode), 'Wrong return type "%s"' % \
        type(retval)
    return retval