def get_alias(self, alias):
    """Look up one alias (case-insensitively) in both scopes.

    Returns a dict with a 'global' alias->command map and, when the
    user is logged in, a 'user' map as well.
    """
    needle = alias.lower()
    found = {}
    if self.id:
        user_rows = db.fetchall(
            "SELECT alias, command FROM users.user_aliases "
            "WHERE user_id=%s AND LOWER(alias)=%s;",
            [self.id, needle])
        found['user'] = dict(user_rows)
    global_rows = db.fetchall(
        "SELECT alias, command FROM users.aliases "
        "WHERE LOWER(alias)=%s;",
        [needle])
    found['global'] = dict(global_rows)
    return found
def get_alias(self, alias):
    """Resolve ``alias`` to its command in user and global scopes.

    Result maps scope name ('user', 'global') to an alias->command
    dict; the 'user' entry is present only for identified users.
    """
    lowered = alias.lower()
    result = {}
    if self.id:
        result['user'] = dict(
            db.fetchall(
                "SELECT alias, command FROM users.user_aliases "
                "WHERE user_id=%s AND LOWER(alias)=%s;",
                [self.id, lowered]))
    result['global'] = dict(
        db.fetchall(
            "SELECT alias, command FROM users.aliases "
            "WHERE LOWER(alias)=%s;",
            [lowered]))
    return result
def get_subscribers(self, bluser=None):
    """Return ids of users subscribed to this post.

    When ``bluser`` is given, subscribers who blacklisted that user
    are excluded from the result.
    """
    post_id = unb26(self.id)
    if bluser is None:
        rows = db.fetchall("SELECT user_id FROM subs.posts "
                           "WHERE post_id=%s;", [post_id])
    else:
        rows = db.fetchall("SELECT user_id FROM subs.posts "
                           "WHERE post_id=%s "
                           "EXCEPT "
                           "SELECT user_id "
                           "FROM users.blacklist "
                           "WHERE to_user_id=%s;",
                           [post_id, bluser.id])
    return [row[0] for row in rows]
def alias_list(self):
    """Return the user-scoped and global alias->command maps.

    The per-user map is cached under 'aliases:<id>' with no TTL;
    the global map is cached for 300 seconds.
    """
    result = {}
    if self.id:
        user_key = 'aliases:%s' % self.id
        user_map = cache_get(user_key)
        if user_map is None:
            user_map = dict(db.fetchall(
                "SELECT alias, command FROM users.user_aliases "
                "WHERE user_id=%s;", [self.id]))
            cache_store(user_key, user_map)
        result['user'] = user_map
    global_map = cache_get('aliases:global')
    if global_map is None:
        global_map = dict(db.fetchall(
            "SELECT alias, command FROM users.aliases;"))
        cache_store('aliases:global', global_map, 300)
    result['global'] = global_map
    return result
def get_unconfirmed_accounts(self, type):
    """Return unconfirmed account addresses of the given type.

    Raises ValueError when ``type`` is not one of ACCOUNT_TYPES.
    """
    if type not in ACCOUNT_TYPES:
        raise ValueError(type)
    rows = db.fetchall(
        "SELECT address FROM users.accounts_unconfirmed "
        "WHERE user_id=%s AND type=%s;",
        [self.id, type])
    return [row['address'] for row in rows]
def alias_list(self):
    """Return all aliases visible to this user, keyed by scope.

    Per-user aliases are cached indefinitely; global aliases for
    five minutes.
    """
    aliases = {}
    if self.id:
        key = 'aliases:%s' % self.id
        cached = cache_get(key)
        if cached is None:
            cached = dict(
                db.fetchall(
                    "SELECT alias, command FROM users.user_aliases "
                    "WHERE user_id=%s;", [self.id]))
            cache_store(key, cached)
        aliases['user'] = cached
    cached = cache_get('aliases:global')
    if cached is None:
        cached = dict(
            db.fetchall("SELECT alias, command FROM users.aliases;"))
        cache_store('aliases:global', cached, 300)
    aliases['global'] = cached
    return aliases
def subscriptions(self, type=None):
    """Return the users this user is subscribed to, sorted by login.

    ``type`` optionally restricts results to one account type.
    The raw rows are cached for 120 seconds per (user, type) pair.
    """
    if not self.id:
        return []
    key = "subs:%s:%s" % (self.id, type or 'all')
    res = cache_get(key)
    if not res:
        values = [self.id]
        if type:
            type_filter = " AND u.type=%s"
            values.append(type)
        else:
            type_filter = ''
        res = db.fetchall("SELECT u.id, u.login, u.type, "
                          "i.name, i.gender, i.avatar, i.homepage "
                          "FROM subs.users s "
                          "JOIN users.logins u ON u.id=s.to_user_id "
                          "LEFT OUTER JOIN users.info i "
                          "ON i.id=s.to_user_id "
                          "WHERE s.user_id=%%s %s;" % type_filter,
                          values)
        # BUG FIX: was cache_store(res, 120), which used the result
        # as the cache key and never stored under `key` — the cache
        # was always a miss.  Store under the computed key instead.
        cache_store(key, res, 120)
    users = []
    for r in res:
        u = User.from_data(r['id'], r['login'],
                           info={'name': r['name'],
                                 'gender': r['gender'],
                                 'avatar': r['avatar'],
                                 'homepage': r['homepage']})
        users.append(u)
    return sorted(users, key=lambda u: u.login.lower())
def _set_public(self):
    """Make the account public: every pending subscription request
    is consumed and converted into an actual subscription."""
    requesters = db.fetchall("DELETE FROM subs.requests "
                             "WHERE to_user_id=%s RETURNING user_id;",
                             [self.id])
    pairs = [{'u': row['user_id'], 'to': self.id} for row in requesters]
    db.batch("INSERT INTO subs.users VALUES(%(u)s, %(to)s);", pairs)
def get_unconfirmed_accounts(self, type):
    """List this user's not-yet-confirmed addresses of one type.

    ValueError is raised for unknown account types.
    """
    if type not in ACCOUNT_TYPES:
        raise ValueError(type)
    return [row['address'] for row in
            db.fetchall("SELECT address FROM users.accounts_unconfirmed "
                        "WHERE user_id=%s AND type=%s;",
                        [self.id, type])]
def blacklisters(self):
    """Return the users who blacklisted this user, sorted by login."""
    if not self.id:
        return []
    rows = db.fetchall(
        """
        SELECT u.id, u.login, i.name, i.gender, i.avatar
        FROM users.blacklist AS bl
        INNER JOIN users.logins AS u ON bl.user_id = u.id
        LEFT OUTER JOIN users.info AS i ON i.id = bl.user_id
        WHERE bl.to_user_id=%s;
        """, [self.id])
    users = [User.from_data(r['id'], r['login'],
                            info={'name': r['name'],
                                  'gender': r['gender'],
                                  'avatar': r['avatar']})
             for r in rows]
    return sorted(users, key=lambda u: u.login.lower())
def blacklisters():
    """Dump the top-20 blacklisting users as JSON.

    Writes [[login, count], ...]-style rows to blacklisters.json in
    the configured stats directory.
    """
    res = db.fetchall("SELECT u.login, count(b.to_user_id) cnt "
                      "FROM users.blacklist b "
                      "JOIN users.logins u ON b.user_id=u.id "
                      "GROUP BY u.login ORDER BY cnt DESC LIMIT 20;")
    # `with` ensures the file is closed even if json.dump raises.
    with open(os.path.join(settings.stat_path, "blacklisters.json"),
              "w") as fd:
        json.dump(list(res), fd)
def recipients(self):
    """Return the Users this post is addressed to."""
    rows = db.fetchall(
        "SELECT u.id, u.login FROM posts.recipients r "
        "JOIN users.logins u ON u.id=r.user_id "
        "WHERE r.post_id=%s;",
        [unb26(self.id)],
    )
    users = []
    for row in rows:
        users.append(User.from_data(row[0], row[1]))
    return users
def recommended_users(self):
    """Return the Users who recommended this post itself
    (comment_id=0, i.e. not one of its comments)."""
    rows = db.fetchall(
        "SELECT u.id, u.login FROM posts.recommendations r "
        "JOIN users.logins u ON u.id=r.user_id "
        "WHERE post_id=%s AND comment_id=0;",
        [unb26(self.id)],
    )
    users = []
    for row in rows:
        users.append(User.from_data(row[0], row[1]))
    return users
def posts():
    """Dump daily public post counts to posts.json.

    Covers module-level `start`..`end` dates; output rows are
    [["YYYY-MM-DD", count], ...].
    """
    res = db.fetchall("SELECT created::date, count(id) AS cnt "
                      "FROM posts.posts "
                      "WHERE created::date >= %s AND created::date <= %s "
                      "AND private=false "
                      "GROUP BY created::date "
                      "ORDER BY created::date;",
                      [start.date(), end.date()])
    # List comprehension instead of map(): same output, and works
    # unchanged on Python 3 (where map() is lazy and not JSON-able).
    rows = [[day.strftime("%Y-%m-%d"), cnt] for day, cnt in res]
    # `with` guarantees the file is closed on error.
    with open(os.path.join(settings.stat_path, "posts.json"), "w") as fd:
        json.dump(rows, fd)
def posters_weekly():
    """Dump the 20 most active public posters of the last 7 days
    to posters_weekly.json in the stats directory."""
    res = db.fetchall("SELECT u.login, count(p.id) cnt "
                      "FROM posts.posts p "
                      "JOIN users.logins u ON p.author=u.id "
                      "WHERE p.created::date > now() - interval '7 days' "
                      "AND p.private=false "
                      "GROUP BY u.login ORDER BY cnt DESC LIMIT 20;")
    # `with` ensures the file handle is released even if dump fails.
    with open(os.path.join(settings.stat_path, "posters_weekly.json"),
              "w") as fd:
        json.dump(list(res), fd)
def posters_weekly():
    """Write the weekly top-20 posters (login, post count) of public
    posts to posters_weekly.json."""
    res = db.fetchall(
        "SELECT u.login, count(p.id) cnt "
        "FROM posts.posts p "
        "JOIN users.logins u ON p.author=u.id "
        "WHERE p.created::date > now() - interval '7 days' "
        "AND p.private=false "
        "GROUP BY u.login ORDER BY cnt DESC LIMIT 20;")
    # Context manager closes the file on any exit path.
    with open(os.path.join(settings.stat_path, "posters_weekly.json"),
              "w") as fd:
        json.dump(list(res), fd)
def posts():
    """Write daily counts of public posts between the module-level
    `start` and `end` dates to posts.json."""
    res = db.fetchall(
        "SELECT created::date, count(id) AS cnt "
        "FROM posts.posts "
        "WHERE created::date >= %s AND created::date <= %s "
        "AND private=false "
        "GROUP BY created::date "
        "ORDER BY created::date;",
        [start.date(), end.date()])
    # Comprehension replaces map(): identical output, Python 3 safe.
    rows = [[day.strftime("%Y-%m-%d"), cnt] for day, cnt in res]
    # `with` guarantees the file is closed on error.
    with open(os.path.join(settings.stat_path, "posts.json"), "w") as fd:
        json.dump(rows, fd)
def most_commented_weekly():
    """Dump the 20 authors whose last-week public posts drew the most
    comments from other users to most_commented_weekly.json."""
    res = db.fetchall("SELECT u.login, count(c.id) cnt "
                      "FROM posts.comments c "
                      "JOIN posts.posts p ON p.id=c.post_id AND p.private=false "
                      "JOIN users.logins u ON u.id=p.author "
                      "WHERE p.created::date > now() - interval '7 days' "
                      "AND c.author != p.author "
                      "GROUP BY u.login ORDER BY cnt DESC LIMIT 20;")
    # `with` ensures the file is closed even if json.dump raises.
    with open(os.path.join(settings.stat_path,
                           "most_commented_weekly.json"), "w") as fd:
        json.dump(list(res), fd)
def tag_blacklist(self):
    """Return this user's blacklisted tags grouped by target user.

    Each row carries user_id, login ('' for global entries) and an
    array of tags.
    """
    if not self.id:
        return []
    return db.fetchall("SELECT t.to_user_id AS user_id, "
                       "COALESCE(u.login, '') AS login, "
                       "array_agg(t.tag) AS tags "
                       "FROM posts.tags_blacklist t "
                       "LEFT OUTER JOIN users.logins u ON t.to_user_id=u.id "
                       "WHERE t.user_id=%s "
                       "GROUP BY t.to_user_id, u.login;", [self.id])
def index_posts():
    """(Re)index every post into the 'point-posts' search index."""
    rows = db.fetchall("SELECT u.id user_id, u.login, p.id post_id, "
                       "p.type post_type, "
                       "p.title, p.tags, p.text, p.created, p.private "
                       "FROM posts.posts p "
                       "JOIN users.logins u ON p.author=u.id;")
    for row in rows:
        doc = dict(row)
        # Posts are addressed externally by their base-26 id.
        doc['post_id'] = b26(doc['post_id'])
        es.index(index='point-posts', id=doc['post_id'],
                 doc_type='post', body=doc)
def index_comments():
    """(Re)index every comment into the 'point-comments' search index."""
    rows = db.fetchall("SELECT u.id user_id, u.login, c.post_id, "
                       "p.type post_type, "
                       "c.comment_id, c.text, c.created, p.private "
                       "FROM posts.comments c "
                       "JOIN users.logins u ON c.author=u.id "
                       "JOIN posts.posts p ON p.id=c.post_id;")
    for row in rows:
        doc = dict(row)
        doc['post_id'] = b26(doc['post_id'])
        # Document ids combine post and comment: "<post>-<comment>".
        doc_id = '%s-%s' % (doc['post_id'], doc['comment_id'])
        es.index(index='point-comments', id=doc_id,
                 doc_type='post', body=doc)
def most_commented_weekly():
    """Write the weekly top-20 most-commented authors (excluding
    self-comments, public posts only) to most_commented_weekly.json."""
    res = db.fetchall(
        "SELECT u.login, count(c.id) cnt "
        "FROM posts.comments c "
        "JOIN posts.posts p ON p.id=c.post_id AND p.private=false "
        "JOIN users.logins u ON u.id=p.author "
        "WHERE p.created::date > now() - interval '7 days' "
        "AND c.author != p.author "
        "GROUP BY u.login ORDER BY cnt DESC LIMIT 20;")
    # Context manager closes the file on any exit path.
    with open(os.path.join(settings.stat_path,
                           "most_commented_weekly.json"), "w") as fd:
        json.dump(list(res), fd)
def comments_avg():
    """Write average comment counts per weekday (last month) to
    comments_avg.json, reordered so the week starts on Monday."""
    res = db.fetchall("SELECT "
                      "CASE WHEN d::int=1 THEN 6 ELSE d::int-2 END d, "
                      "round(avg(cnt))::int "
                      "FROM "
                      "(SELECT to_char(created::date, 'd') d, count(id) cnt "
                      "FROM posts.comments "
                      "WHERE created::date > now() - interval '1 month' "
                      "GROUP BY created::date) AS wt "
                      "GROUP BY d ORDER BY d;")
    # Sort by remapped weekday index, keep only the averages.
    # (Comprehension instead of map(): same result, Python 3 safe.)
    values = [r[1] for r in sorted(res, key=lambda r: r[0])]
    # `with` ensures the file is closed even if json.dump raises.
    with open(os.path.join(settings.stat_path, "comments_avg.json"),
              "w") as fd:
        json.dump(values, fd)
def delete(self):
    """Delete this post, its cached comment counter and every
    attachment referenced by the post or its comments."""
    attachments = []
    # Gather attachment names from all comments of the post.
    for row in db.fetchall("SELECT files FROM posts.comments "
                           "WHERE post_id=%s;", [unb26(self.id)]):
        if row and row['files']:
            attachments.extend(row['files'])
    # Delete the post row itself; fetchone returns the RETURNING row,
    # whose single field is the post's own files list (or None).
    for field in db.fetchone("DELETE FROM posts.posts WHERE id=%s "
                             "RETURNING files;", [unb26(self.id)]):
        if field:
            attachments.extend(field)
    RedisPool(settings.storage_socket).delete(
        'cmnt_cnt.%s' % unb26(self.id))
    for name in attachments:
        remove_attach(name)
def outgoing_subscription_requests(self):
    """Return the users this user has asked to subscribe to
    (requests still awaiting approval)."""
    if not self.id:
        return []
    rows = db.fetchall("SELECT u.id, u.login, i.name, i.gender, i.avatar "
                       "FROM subs.requests s "
                       "JOIN users.logins u ON u.id=s.to_user_id "
                       "LEFT OUTER JOIN users.info i ON i.id=s.to_user_id "
                       "WHERE s.user_id=%s;", [self.id])
    return [User.from_data(r['id'], r['login'],
                           info={'name': r['name'],
                                 'gender': r['gender'],
                                 'avatar': r['avatar']})
            for r in rows]
def subscribers(self):
    """Return this user's subscribers as Users sorted by login."""
    if not self.id:
        return []
    rows = db.fetchall("SELECT u.id, u.login, i.name, i.gender, i.avatar "
                       "FROM subs.users s "
                       "JOIN users.logins u ON u.id=s.user_id "
                       "LEFT OUTER JOIN users.info i ON i.id=s.user_id "
                       "WHERE s.to_user_id=%s;", [self.id])
    subs = [User.from_data(r['id'], r['login'],
                           info={'name': r['name'],
                                 'gender': r['gender'],
                                 'avatar': r['avatar']})
            for r in rows]
    subs.sort(key=lambda u: u.login.lower())
    return subs
def _set_private(self):
    """Make the account private.

    Subscribers that are not on the whitelist lose their direct
    subscriptions (user, post and tag level); their user
    subscriptions are converted into pending requests.
    """
    # Current subscribers minus the whitelist: these lose access.
    res = [u['id'] for u in \
           db.fetchall("SELECT user_id AS id FROM subs.users "
                       "WHERE to_user_id=%(id)s "
                       "EXCEPT "
                       "SELECT to_user_id AS id FROM users.whitelist "
                       "WHERE user_id=%(id)s;", {'id': self.id})]
    # Drop their user-level subscriptions...
    db.perform("DELETE FROM subs.users "
               "WHERE user_id=ANY(%s) AND to_user_id=%s;",
               [res, self.id])
    # ...and re-queue them as requests awaiting approval.
    db.batch("INSERT INTO subs.requests VALUES(%(u)s, %(to)s);",
             [{'u':u, 'to':self.id} for u in res])
    # Also drop their subscriptions to this author's individual posts
    # and tags, so nothing private keeps leaking through those.
    db.perform("DELETE FROM subs.posts s USING posts.posts p "
               "WHERE s.post_id=p.id "
               "AND s.user_id=ANY(%s) AND p.author=%s;",
               [res, self.id])
    db.perform("DELETE FROM subs.tags_user "
               "WHERE to_user_id=%s AND user_id=ANY(%s);",
               [self.id, res])
def update_task(self):
    """Schedule the next fetch of this feed.

    The interval adapts to the feed's recent posting rate (average
    gap between the last 10 posts plus one minute of slack), clamped
    between settings.feed_min_update_timeout and
    settings.feed_max_update_timeout.  Also advances the
    last-published marker when newer posts are seen.
    """
    if not self.id:
        return
    res = db.fetchall(
        "SELECT created FROM posts.posts WHERE author=%s "
        "ORDER BY created DESC LIMIT 10;", [self.id])
    now = datetime.now()
    timestamps = []
    for p in res:
        timestamps.append(timestamp(p['created']))
    # Too little history to estimate a rate: back off to the maximum.
    if len(timestamps) < 2:
        self.update_at(now + timedelta(seconds=settings.feed_max_update_timeout))
        return
    lp = self.last_published()
    tz = timezone(settings.timezone)
    newlp = tz.localize(datetime.fromtimestamp(int(max(timestamps))))
    if newlp > lp:
        self.last_published(newlp)
    # Include "now" so a long-quiet feed still stretches its interval.
    timestamps.append(timestamp(now))
    timestamps.sort()
    deltas = []
    for i in xrange(1, len(timestamps)):
        deltas.append(timestamps[i] - timestamps[i - 1])
    # Mean inter-post gap plus 60s of slack.
    delta = reduce(lambda mem, t: mem + t, deltas, 0) / len(deltas) + 60
    if delta < settings.feed_min_update_timeout:
        delta = settings.feed_min_update_timeout
    if delta > settings.feed_max_update_timeout:
        delta = settings.feed_max_update_timeout
    update_at = now + timedelta(seconds=delta)
    self.update_at(update_at)
    del timestamps
def delete(self):
    """Delete this post everywhere: comment attachments, the post row,
    the cached comment counter, attachment files and the
    search-index document."""
    files = []
    # Attachments referenced by the post's comments.
    for ff in db.fetchall("SELECT files FROM posts.comments "
                          "WHERE post_id=%s;", [unb26(self.id)]):
        if ff and ff["files"]:
            files.extend(ff["files"])
    # Delete the post row; iterating the fetchone row walks the
    # RETURNING fields — here just the post's own files list.
    for ff in db.fetchone("DELETE FROM posts.posts WHERE id=%s "
                          "RETURNING files;", [unb26(self.id)]):
        if ff:
            files.extend(ff)
    redis = RedisPool(settings.storage_socket)
    redis.delete("cmnt_cnt.%s" % unb26(self.id))
    for f in files:
        remove_attach(f)
    # Best effort: the post may never have been indexed.
    es = elasticsearch.Elasticsearch()
    try:
        es.delete(index="point-posts", doc_type="post", id=self.id)
    except elasticsearch.exceptions.NotFoundError:
        pass
def blacklisters(self):
    """List users who have put this user on their blacklist,
    ordered by login (case-insensitive)."""
    if not self.id:
        return []
    rows = db.fetchall("""
        SELECT u.id, u.login, i.name, i.gender, i.avatar
        FROM users.blacklist AS bl
        INNER JOIN users.logins AS u ON bl.user_id = u.id
        LEFT OUTER JOIN users.info AS i ON i.id = bl.user_id
        WHERE bl.to_user_id=%s;
        """, [self.id])
    users = []
    for row in rows:
        users.append(User.from_data(row['id'], row['login'],
                                    info={'name': row['name'],
                                          'gender': row['gender'],
                                          'avatar': row['avatar']}))
    users.sort(key=lambda u: u.login.lower())
    return users
def update_task(self):
    """Plan the feed's next update time from its recent activity.

    Uses the average gap between the last 10 posts (plus 60s) as the
    next polling interval, clamped to the configured min/max, and
    bumps the last-published timestamp when newer posts appear.
    """
    if not self.id:
        return
    res = db.fetchall("SELECT created FROM posts.posts WHERE author=%s "
                      "ORDER BY created DESC LIMIT 10;", [self.id])
    now = datetime.now()
    timestamps = []
    for p in res:
        timestamps.append(timestamp(p['created']))
    # Fewer than two posts: no rate to compute, use the max timeout.
    if len(timestamps) < 2:
        self.update_at(now + timedelta(seconds=settings.feed_max_update_timeout))
        return
    lp = self.last_published()
    tz = timezone(settings.timezone)
    newlp = tz.localize(datetime.fromtimestamp(int(max(timestamps))))
    if newlp > lp:
        self.last_published(newlp)
    # Append "now" so stale feeds naturally widen their interval.
    timestamps.append(timestamp(now))
    timestamps.sort()
    deltas = []
    for i in xrange(1, len(timestamps)):
        deltas.append(timestamps[i] - timestamps[i-1])
    # Mean inter-post gap plus a minute of slack.
    delta = reduce(lambda mem, t: mem+t, deltas, 0) / len(deltas) + 60
    if delta < settings.feed_min_update_timeout:
        delta = settings.feed_min_update_timeout
    if delta > settings.feed_max_update_timeout:
        delta = settings.feed_max_update_timeout
    update_at = now + timedelta(seconds=delta)
    self.update_at(update_at)
    del timestamps
def delete(self):
    """Remove this post and everything attached to it: files from
    comments and the post, the redis comment counter, and its
    Elasticsearch document (if indexed)."""
    attachments = []
    # Attachments carried by the post's comments.
    for row in db.fetchall("SELECT files FROM posts.comments "
                           "WHERE post_id=%s;", [unb26(self.id)]):
        if row and row['files']:
            attachments.extend(row['files'])
    # Delete the post; the RETURNING row's only field is its files list.
    for field in db.fetchone("DELETE FROM posts.posts WHERE id=%s "
                             "RETURNING files;", [unb26(self.id)]):
        if field:
            attachments.extend(field)
    redis = RedisPool(settings.storage_socket)
    redis.delete('cmnt_cnt.%s' % unb26(self.id))
    for name in attachments:
        remove_attach(name)
    # Search-index cleanup is best effort.
    es = elasticsearch.Elasticsearch()
    try:
        es.delete(index='point-posts', doc_type='post', id=self.id)
    except elasticsearch.exceptions.NotFoundError:
        pass
def unread_comments_count(self, ptype=None):
    """Return the user's unread-comment count.

    With ``ptype`` given, count only that post type (0 when absent).
    Per-type counters are fetched once and memoized on the instance.
    """
    if not self.id:
        return 0
    if not hasattr(self, '_unread_comments'):
        self._unread_comments = {}
    if not self._unread_comments:
        rows = db.fetchall("SELECT type, count(post_id) AS cnt "
                           "FROM posts.unread_comments "
                           "WHERE user_id=%s GROUP BY type;", [self.id])
        self._unread_comments = {row['type']: row['cnt'] for row in rows}
    if ptype:
        return self._unread_comments.get(ptype, 0)
    return sum(self._unread_comments.values())
def unread_comments_count(self, ptype=None):
    """Count unread comments for this user, either for one post type
    (``ptype``) or in total; counters are lazily cached on self."""
    if not self.id:
        return 0
    if not hasattr(self, '_unread_comments'):
        self._unread_comments = {}
    if not self._unread_comments:
        counters = {}
        for row in db.fetchall(
                "SELECT type, count(post_id) AS cnt "
                "FROM posts.unread_comments "
                "WHERE user_id=%s GROUP BY type;", [self.id]):
            counters[row['type']] = row['cnt']
        self._unread_comments = counters
    if ptype:
        return self._unread_comments.get(ptype, 0)
    return sum(self._unread_comments.values())
def tags(self, limit=None, sort_by_name=False, all=False):
    """Return this user's tags with usage counts.

    Ordered by count descending (or tag name when ``sort_by_name``),
    optionally limited.  Cached for 60 seconds; ``all`` bypasses the
    cache read (the fresh result is still stored).
    """
    if not self.id:
        return []
    key = 'user_tags:%d:%s' % (self.id, (limit or 'all'))
    if not all:
        cached = cache_get(key)
        if cached:
            return cached
    order = 'tag ASC' if sort_by_name else 'cnt DESC'
    lim_sql = ("LIMIT %d" % limit) if limit else ''
    rows = db.fetchall("SELECT tag, count(post_id) AS cnt "
                       "FROM posts.tags WHERE user_id=%%s "
                       "GROUP BY tag ORDER BY %s "
                       "%s;" % (order, lim_sql), [self.id])
    cache_store(key, [dict(r) for r in rows], 60)
    return rows
def tags(self, limit=None, sort_by_name=False, all=False):
    """List (tag, cnt) rows for this user, count-ordered by default
    or name-ordered with ``sort_by_name``; ``all`` skips the cached
    copy, ``limit`` caps the row count.  Results cache for 60s."""
    if not self.id:
        return []
    key = 'user_tags:%d:%s' % (self.id, (limit or 'all'))
    if not all:
        hit = cache_get(key)
        if hit:
            return hit
    order_sql = 'tag ASC' if sort_by_name else 'cnt DESC'
    limit_sql = ("LIMIT %d" % limit) if limit else ''
    rows = db.fetchall(
        "SELECT tag, count(post_id) AS cnt "
        "FROM posts.tags WHERE user_id=%%s "
        "GROUP BY tag ORDER BY %s "
        "%s;" % (order_sql, limit_sql), [self.id])
    cache_store(key, [dict(row) for row in rows], 60)
    return rows
def get_ulogin_accounts(self):
    """Return every ulogin (social login) account row linked to this
    user; empty list for anonymous users."""
    if not self.id:
        return []
    rows = db.fetchall("SELECT * FROM users.ulogin_accounts WHERE id=%s;",
                       [self.id])
    return rows
def handle_message(self, channel, data):
    """Dispatch a pubsub message to XMPP recipients.

    ``channel`` selects the template family; ``data`` is the payload.
    'confirm' and 'remember' are single-address service messages sent
    with ``_authorize``; every other channel fans out to the users in
    ``data['to']``, honouring each recipient's IM profile (off flag,
    xhtml capability, text cut length, language).
    """
    tmpl = {}  # per-language template cache for the fan-out loop
    #if channel == 'msg' and 'author' in data:
    #redis = RedisPool(settings.redis_socket)
    #avatar = redis.get('avatar32.%s' % data['author'])
    #if not avatar:
    #av_path = os.path.join(settings.avatars_path, '32',
    #'%s.png' % data['author'])
    #if not os.path.exists(av_path):
    #av_path = os.path.join(settings.avatars_path, '32.png')
    #avfd = open(av_path)
    #avatar = 'data:image/png;base64,%s' % b64encode(avfd.read())
    #avfd.close()
    #data['avatar'] = avatar
    if channel == 'confirm':
        # Confirmation codes only make sense for xmpp addresses.
        if 'type' not in data or data['type'] != 'xmpp' or \
            not 'address' in data or not data['address'].strip():
            return
        body = xmpp_template('confirm_code', settings.lang, None, **data)
        out = {
            'to': data['address'],
            'body': body['body'],
            '_authorize': True
        }
        self.xout.push(json.dumps(out))
        return
    if channel == 'remember':
        body = xmpp_template('remember', settings.lang, None, **data)
        out = {
            'to': data['address'],
            'body': body['body'],
            '_authorize': True
        }
        self.xout.push(json.dumps(out))
        return
    # Fan-out: normalize recipients, prefetch all their IM profiles.
    if not isinstance(data['to'], (list, tuple)):
        data['to'] = [data['to']]
    res = db.fetchall("SELECT * FROM users.profile_im "
                      "WHERE id=ANY(%s);", [data['to']])
    profile = {r['id']:dict(r) for r in res}
    for i in data['to']:
        cdata = data.copy()
        user = ImUser.from_data(i, None)
        try:
            jid = user.get_active_account('xmpp')
            if not jid:
                continue
        except TypeError:
            continue
        if i not in profile:
            profile[i] = user.profile_defaults()
        # Recipient switched IM delivery off entirely.
        if profile[i]['off']:
            continue
        lang = user.get_profile('lang')
        cut = None
        # Trim long texts per the recipient's im.cut preference
        # (feed messages are never trimmed).
        if ('type' not in cdata or cdata['type'] != 'feed') and \
            'cut' in data and 'text' in data:
            cut = user.get_profile('im.cut')
            if cut and len(cdata['text']) > cut-3:
                cdata['text'] = cdata['text'][:cut] + '...'
        if cut:
            # Trimmed text is recipient-specific: render individually.
            ctmpl = xmpp_template("%s_%s"%(channel, cdata['a']), lang,
                                  'html', **cdata)
        else:
            # Untrimmed output only varies by language: cache per lang.
            if not lang in tmpl:
                tmpl[lang] = xmpp_template("%s_%s"%(channel, cdata['a']),
                                           lang, 'html', **cdata)
            ctmpl = tmpl[lang]
        if profile[i]['xhtml']:
            out = {'to':jid, 'body':ctmpl['body'], 'html':ctmpl['html']}
        else:
            out = {'to':jid, 'body':ctmpl['body']}
        # Stable message ids let the transport replace/thread messages.
        if 'post_id' in cdata and 'comment_id' in cdata:
            out['_msg_id'] = 'post_%s_%s_%s' % (i, cdata['post_id'],
                                                cdata['comment_id'])
        elif 'post_id' in cdata:
            out['_msg_id'] = 'post_%s_%s' % (i, cdata['post_id'])
        #if channel == 'msg':
        #    if profile[i]['post_resource']:
        #        out['_resource'] = '#%s' % data['id']
        #    elif profile[i]['user_resource']:
        #        out['_resource'] = '@%s' % data['author']
        self.xout.push(json.dumps(out))
def recommended_users(self):
    """Users who recommended the post itself (comment_id=0)."""
    rows = db.fetchall("SELECT u.id, u.login FROM posts.recommendations r "
                       "JOIN users.logins u ON u.id=r.user_id "
                       "WHERE post_id=%s AND comment_id=0;",
                       [unb26(self.id)])
    return [User.from_data(uid, login) for uid, login in rows]
def recipients(self):
    """Users explicitly addressed by this post."""
    rows = db.fetchall("SELECT u.id, u.login FROM posts.recipients r "
                       "JOIN users.logins u ON u.id=r.user_id "
                       "WHERE r.post_id=%s;",
                       [unb26(self.id)])
    return [User.from_data(uid, login) for uid, login in rows]
def updates(self):
    """Return the (created, text) update rows attached to this post."""
    rows = db.fetchall("SELECT created, text FROM posts.updates "
                       "WHERE post_id=%s;", [unb26(self.id)])
    return rows
def comments(self, last=False, all=False, offset=None, limit=None,
             cuser=None):
    """Return the post's comments as Comment objects.

    last   -- fetch the newest ``limit`` comments (returned oldest
              first: fetched DESC then reversed)
    all    -- fetch every comment, ignoring offset/limit
    offset -- skip comments with comment_id below this value
    cuser  -- viewing user; enables per-user recommended/bookmarked
              flags and unread markers

    Side effect: when called without limit/offset, refreshes the
    cached comment counter in redis.
    """
    # Resolve ordering / paging mode.
    if last:
        lim = " LIMIT %d" % limit if limit else ''
        offset = 0
        order = ' DESC'
    elif all:
        lim = ''
        offset = 0
        order = ' ASC'
    else:
        if not offset:
            offset = 0
        lim = " LIMIT %d" % limit if limit else ''
        order = ' ASC'
    if isinstance(cuser, User) and cuser.id:
        # Viewer-aware query: joins the viewer's own recommendations
        # and bookmarks to flag each comment.
        res = db.fetchall("SELECT c.comment_id, c.to_comment_id,"
                          "c.author AS user_id, "
                          "CASE WHEN c.anon_login IS NOT NULL "
                          "THEN c.anon_login ELSE u1.login END AS login,"
                          "c.created at time zone %%s AS created, "
                          "c.text, c.files, "
                          "c.updated at time zone %%s AS updated, "
                          "CASE WHEN rc.comment_id IS NOT NULL "
                          "THEN true ELSE false END AS is_rec, "
                          "ur.user_id AS recommended, "
                          "ub.user_id AS bookmarked, "
                          "ui1.name, ui1.avatar "
                          "FROM posts.comments c "
                          "JOIN users.logins u1 ON c.author=u1.id "
                          "LEFT OUTER JOIN users.info ui1 "
                          "ON ui1.id=c.author "
                          "LEFT OUTER JOIN posts.recommendations rc "
                          "ON rc.post_id=c.post_id "
                          "AND rc.rcid=c.comment_id "
                          "LEFT OUTER JOIN posts.recommendations ur "
                          "ON ur.user_id=%%s "
                          "AND ur.post_id=c.post_id "
                          "AND ur.comment_id=c.comment_id "
                          "LEFT OUTER JOIN posts.bookmarks ub "
                          "ON ub.user_id=%%s "
                          "AND ub.post_id=c.post_id "
                          "AND ub.comment_id=c.comment_id "
                          "WHERE c.post_id=%%s AND c.comment_id>=%%s "
                          "ORDER BY c.created%s%s;" % (order, lim),
                          [self.tz, self.tz, cuser.id, cuser.id,
                           unb26(self.id), offset])
    else:
        # Anonymous query: personal flags are constant false.
        res = db.fetchall("SELECT c.comment_id, c.to_comment_id,"
                          "c.author AS user_id, ui1.name,"
                          "CASE WHEN c.anon_login IS NOT NULL "
                          "THEN c.anon_login ELSE u1.login END AS login,"
                          "c.created at time zone %%s AS created, "
                          "c.text, c.files, "
                          "c.updated at time zone %%s AS updated, "
                          "CASE WHEN rc.comment_id IS NOT NULL "
                          "THEN true ELSE false END AS is_rec, "
                          "false AS recommended, "
                          "false AS bookmarked, "
                          "ui1.avatar "
                          "FROM posts.comments c "
                          "JOIN users.logins u1 ON c.author=u1.id "
                          "LEFT OUTER JOIN users.info ui1 "
                          "ON ui1.id=c.author "
                          "LEFT OUTER JOIN posts.recommendations rc "
                          "ON rc.post_id=c.post_id "
                          "AND rc.rcid=c.comment_id "
                          "WHERE c.post_id=%%s AND c.comment_id>=%%s "
                          "ORDER BY c.created%s%s;" % (order, lim),
                          [self.tz, self.tz, unb26(self.id), offset])
    # "last" fetched newest-first; present oldest-first like the rest.
    if last:
        res.reverse()
    if cuser:
        unr = db.fetchall("SELECT comment_id FROM posts.unread_comments "
                          "WHERE user_id=%s AND post_id=%s;",
                          [cuser.id, unb26(self.id)])
        unread = { r['comment_id']: 1 for r in unr }
    else:
        unread = {}
    comments = []
    for c in res:
        author = User.from_data(c['user_id'], c['login'],
                                info={'name': c['name'],
                                      'avatar': c['avatar']})
        unr = True if c['comment_id'] in unread else False
        comment = Comment.from_data(self, id=c['comment_id'],
                                    to_comment_id=c['to_comment_id'],
                                    author=author,
                                    created=c['created'],
                                    text=c['text'],
                                    recommended=c['recommended'],
                                    bookmarked=c['bookmarked'],
                                    is_rec=c['is_rec'],
                                    files=c['files'],
                                    updated=c['updated'],
                                    unread=unr)
        comments.append(comment)
    # Full fetch: refresh the cached comment counter as a side effect.
    if not limit and not offset:
        redis = RedisPool(settings.storage_socket)
        redis.set('cmnt_cnt.%s' % unb26(self.id), len(comments))
    return comments
def handle_message(self, channel, data):
    """Route a pubsub message out over XMPP.

    Service channels 'confirm' and 'remember' go straight to a single
    address with the ``_authorize`` flag.  All other channels are
    fanned out to ``data['to']`` users, respecting each recipient's
    IM profile: delivery off, xhtml support, text cut length and
    language.
    """
    tmpl = {}  # per-language rendered-template cache for the loop below
    #if channel == 'msg' and 'author' in data:
    #redis = RedisPool(settings.redis_socket)
    #avatar = redis.get('avatar32.%s' % data['author'])
    #if not avatar:
    #av_path = os.path.join(settings.avatars_path, '32',
    #'%s.png' % data['author'])
    #if not os.path.exists(av_path):
    #av_path = os.path.join(settings.avatars_path, '32.png')
    #avfd = open(av_path)
    #avatar = 'data:image/png;base64,%s' % b64encode(avfd.read())
    #avfd.close()
    #data['avatar'] = avatar
    if channel == 'confirm':
        # Only xmpp confirmations with a non-blank address are valid.
        if 'type' not in data or data['type'] != 'xmpp' or \
            not 'address' in data or not data['address'].strip():
            return
        body = xmpp_template('confirm_code', settings.lang, None, **data)
        out = {
            'to': data['address'],
            'body': body['body'],
            '_authorize': True
        }
        self.xout.push(json.dumps(out))
        return
    if channel == 'remember':
        body = xmpp_template('remember', settings.lang, None, **data)
        out = {
            'to': data['address'],
            'body': body['body'],
            '_authorize': True
        }
        self.xout.push(json.dumps(out))
        return
    # Normalize recipient list and prefetch all IM profiles at once.
    if not isinstance(data['to'], (list, tuple)):
        data['to'] = [data['to']]
    res = db.fetchall(
        "SELECT * FROM users.profile_im "
        "WHERE id=ANY(%s);", [data['to']])
    profile = {r['id']: dict(r) for r in res}
    for i in data['to']:
        cdata = data.copy()
        user = ImUser.from_data(i, None)
        try:
            jid = user.get_active_account('xmpp')
            if not jid:
                continue
        except TypeError:
            continue
        if i not in profile:
            profile[i] = user.profile_defaults()
        # Recipient disabled IM delivery.
        if profile[i]['off']:
            continue
        lang = user.get_profile('lang')
        cut = None
        # Apply the recipient's im.cut trimming (never for feed posts).
        if ('type' not in cdata or cdata['type'] != 'feed') and \
            'cut' in data and 'text' in data:
            cut = user.get_profile('im.cut')
            if cut and len(cdata['text']) > cut - 3:
                cdata['text'] = cdata['text'][:cut] + '...'
        if cut:
            # Trimmed text differs per recipient: render fresh.
            ctmpl = xmpp_template("%s_%s" % (channel, cdata['a']), lang,
                                  'html', **cdata)
        else:
            # Untrimmed rendering varies only by language: reuse it.
            if not lang in tmpl:
                tmpl[lang] = xmpp_template("%s_%s" % (channel, cdata['a']),
                                           lang, 'html', **cdata)
            ctmpl = tmpl[lang]
        if profile[i]['xhtml']:
            out = {'to': jid, 'body': ctmpl['body'], 'html': ctmpl['html']}
        else:
            out = {'to': jid, 'body': ctmpl['body']}
        # Message ids allow the transport layer to thread/replace.
        if 'post_id' in cdata and 'comment_id' in cdata:
            out['_msg_id'] = 'post_%s_%s_%s' % (i, cdata['post_id'],
                                                cdata['comment_id'])
        elif 'post_id' in cdata:
            out['_msg_id'] = 'post_%s_%s' % (i, cdata['post_id'])
        #if channel == 'msg':
        #    if profile[i]['post_resource']:
        #        out['_resource'] = '#%s' % data['id']
        #    elif profile[i]['user_resource']:
        #        out['_resource'] = '@%s' % data['author']
        self.xout.push(json.dumps(out))