def alias_list(self):
    """Return this user's command aliases.

    The result maps 'user' (personal aliases, present only for
    authorized users) and 'global' (site-wide aliases) to
    {alias: command} dicts.  Both sets are read from the cache first
    and re-fetched from the database on a miss.
    """
    result = {}
    if self.id:
        user_key = 'aliases:%s' % self.id
        personal = cache_get(user_key)
        if personal is None:
            rows = db.fetchall(
                "SELECT alias, command FROM users.user_aliases "
                "WHERE user_id=%s;", [self.id])
            personal = dict(rows)
            cache_store(user_key, personal)
        result['user'] = personal
    shared = cache_get('aliases:global')
    if shared is None:
        shared = dict(db.fetchall(
            "SELECT alias, command FROM users.aliases;"))
        cache_store('aliases:global', shared, 300)
    result['global'] = shared
    return result
def markdown_filter(environ, text, post=None, comment=None, img=False):
    """Convert ``text`` to HTML with markdown, caching the result.

    The cache key is derived from the post (and comment) id when given,
    otherwise from an md5 of the text itself.  Caching is controlled by
    ``settings.cache_markdown``, which also serves as the TTL.
    """
    if not text:
        return ''
    caching = settings.cache_markdown
    if caching:
        if post:
            key = 'md:%s' % post
            if comment:
                key = '%s.%s' % (key, comment)
        else:
            key = 'md:%s' % md5(text.encode('utf-8')).hexdigest()
        cached = cache_get(key)
        if cached:
            return cached
    html = md.convert(text)
    # reset() clears footnote definitions accumulated on the converter
    # instance; without it footnotes from this text would leak into every
    # subsequently converted HTML fragment, as described in
    # https://pythonhosted.org/Markdown/extensions/api.html#registerextension
    md.reset()
    if caching:
        cache_store(key, html, caching)
    return html
def subscriptions(self, type=None):
    """Return the users this user is subscribed to, sorted by login.

    type -- optional user type to filter by (None for all types).
    Returns a list of User objects; anonymous users get [].
    """
    if not self.id:
        return []
    key = "subs:%s:%s" % (self.id, type or 'all')
    res = cache_get(key)
    if not res:
        values = [self.id]
        if type:
            type_filter = " AND u.type=%s"
            values.append(type)
        else:
            type_filter = ''
        res = db.fetchall("SELECT u.id, u.login, u.type, "
                          "i.name, i.gender, i.avatar, i.homepage "
                          "FROM subs.users s "
                          "JOIN users.logins u ON u.id=s.to_user_id "
                          "LEFT OUTER JOIN users.info i "
                          "ON i.id=s.to_user_id "
                          "WHERE s.user_id=%%s %s;" % type_filter,
                          values)
        # store plain dicts so the cached value is serializable and the
        # cached/uncached paths yield the same element type
        res = [dict(r) for r in res]
        # BUG FIX: the key argument was missing -- cache_store(res, 120)
        # stored nothing useful, so this cache never worked.
        cache_store(key, res, 120)
    users = []
    for r in res:
        u = User.from_data(r['id'], r['login'],
                           info={'name': r['name'], 'gender': r['gender'],
                                 'avatar': r['avatar'],
                                 'homepage': r['homepage']})
        users.append(u)
    return sorted(users, key=lambda u: u.login.lower())
def alias_list(self):
    """Collect command aliases for this user.

    Returns a dict with a 'global' entry ({alias: command} shared by
    everyone) and, for authorized users, a 'user' entry with personal
    aliases.  Cache misses fall through to the database and repopulate
    the cache (global aliases expire after 300 seconds).
    """
    out = {}
    if self.id:
        key = 'aliases:%s' % self.id
        mine = cache_get(key)
        if mine is None:
            mine = dict(
                db.fetchall(
                    "SELECT alias, command FROM users.user_aliases "
                    "WHERE user_id=%s;", [self.id]))
            cache_store(key, mine)
        out['user'] = mine
    everyone = cache_get('aliases:global')
    if everyone is None:
        everyone = dict(
            db.fetchall("SELECT alias, command FROM users.aliases;"))
        cache_store('aliases:global', everyone, 300)
    out['global'] = everyone
    return out
def get_info(self, param=None):
    """Return the user's profile info dict, or a single field of it.

    ``self.info`` is lazily populated from the cache (dates arrive as
    ISO strings and are parsed back to datetimes) or, on a miss, from
    the database (dates are serialized to ISO strings before caching).
    Returns None when no info exists or the requested field is absent.
    """
    key = 'userinfo:%s' % self.id
    if not self.info:
        cached = cache_get(key)
        if cached:
            # cached timestamps are ISO strings -- restore datetimes
            for field in ('birthdate', 'created'):
                cached[field] = (dateutil.parser.parse(cached[field])
                                 if cached[field] else None)
            self.info = cached
        else:
            row = db.fetchone("SELECT name, email, xmpp, icq, skype, "
                              "about, avatar, gender, "
                              "birthdate, location, homepage, created "
                              "FROM users.info WHERE id=%s;", [self.id])
            if row:
                self.info = dict(row)
                serializable = dict(row)
                for field in ('birthdate', 'created'):
                    serializable[field] = (serializable[field].isoformat()
                                           if serializable[field] else None)
                cache_store(key, serializable)
    if not self.info:
        return None
    if not param:
        return self.info
    try:
        return self.info[param]
    except KeyError:
        return None
def get(table, param):
    """Return profile value ``param`` from ``table`` for this user.

    The profile row is created on first access, then loaded into
    ``self.profile[table]`` from the cache or the database.  Returns
    None when the row or the column cannot be found.
    """
    try:
        return self.profile[table][param]
    except KeyError:
        pass
    try:
        # FIXME: profile models
        db.perform("INSERT INTO %s (id) VALUES (%%s);" % \
                   table, [self.id])
    except IntegrityError:
        pass  # row already exists
    key = 'profile:%s:%s' % (table, self.id)
    res = cache_get(key)
    if res:
        self.profile[table] = res
    else:
        res = db.fetchone("SELECT * FROM %s WHERE id=%%s;" % \
                          table, [self.id])
        log.debug('RES %s %s' % (table, res))
        if not res:
            return None
        self.profile[table] = dict(res)
        cache_store(key, self.profile[table])
    try:
        return self.profile[table][param]
    except KeyError:
        # A stale cache entry may lack the column: drop it and reload
        # from the database once.
        # BUG FIX: the original recursed into get() here, which never
        # terminated when the column genuinely does not exist (the FIXME
        # asked for the recursive call to be removed).
        cache_del(key)
        res = db.fetchone("SELECT * FROM %s WHERE id=%%s;" % \
                          table, [self.id])
        if res:
            self.profile[table] = dict(res)
            cache_store(key, self.profile[table])
            try:
                return self.profile[table][param]
            except KeyError:
                pass
        return None
def get_info(self, param=None):
    """Fetch the user's info, either the whole dict or one field.

    Loads ``self.info`` on demand: from the cache when available
    (re-parsing ISO date strings), otherwise from the database (ISO
    date strings are written to the cache).  None is returned for a
    missing user or a missing field.
    """
    if not self.info:
        key = 'userinfo:%s' % self.id
        data = cache_get(key)
        if data:
            for name in ('birthdate', 'created'):
                if data[name]:
                    data[name] = dateutil.parser.parse(data[name])
                else:
                    data[name] = None
            self.info = data
        else:
            data = db.fetchone(
                "SELECT name, email, xmpp, icq, skype, "
                "about, avatar, gender, "
                "birthdate, location, homepage, created "
                "FROM users.info WHERE id=%s;", [self.id])
            if data:
                self.info = dict(data)
                flat = dict(data)
                for name in ('birthdate', 'created'):
                    flat[name] = flat[name].isoformat() if flat[name] else None
                cache_store(key, flat)
    if not self.info:
        return None
    if param:
        try:
            return self.info[param]
        except KeyError:
            return None
    return self.info
def next_update(self):
    """Return the feed's next scheduled update time, or None.

    The timestamp lives in the cache as a string; it is parsed into a
    datetime here.  Anonymous/unsaved feeds return None.
    """
    if not self.id:
        return None
    raw = cache_get('feed:next_update:%s' % self.id)
    return dateutil.parser.parse(raw) if raw else None
def check_stoplist(text):
    """Return True when ``text`` matches any stoplist pattern.

    Patterns come from the cache when present; otherwise they are read
    via load_stoplist().  NOTE(review): nothing here writes the loaded
    list back to the cache -- presumably load_stoplist() does; confirm.
    Matching is regex-based and case-insensitive.
    """
    patterns = cache_get('stoplist') or load_stoplist()
    return any(re.search(pattern, text, re.I) for pattern in patterns)
def comments_count(self):
    """Return the number of comments authored by this user.

    The value is cached for 30 seconds.  Returns 0 when the count
    cannot be read.
    """
    key = 'comments_count:%s' % self.id
    c = cache_get(key)
    # BUG FIX: "if c:" treated a legitimately cached 0 as a cache miss,
    # re-querying the database on every call for users with no comments.
    if c is not None:
        return c
    try:
        c = db.fetchone("SELECT count(id) FROM posts.comments "
                        "WHERE author=%s;", [self.id])[0]
        cache_store(key, c, 30)
        return c
    except IndexError:
        return 0
def readers_count(self, cache=True):
    """Return the number of this user's subscribers.

    cache -- when True (the default) consult the cache first; the fresh
    value is always written back.  Returns 0 when the count cannot be
    read.
    """
    # BUG FIX: the flag was inverted ("if not cache:"), so the cache was
    # only consulted when the caller explicitly asked to bypass it.
    if cache:
        c = cache_get('readers_count:%s' % self.id)
        # also use "is not None" so a cached 0 is honoured
        if c is not None:
            return c
    try:
        c = db.fetchone("SELECT count(user_id) FROM subs.users "
                        "WHERE to_user_id=%s;", [self.id])[0]
        cache_store('readers_count:%s' % self.id, c)
        return c
    except IndexError:
        return 0
def handle_post(self, data): """Handle post """ if data['private']: return if len(data['tags']) == 0 or 'point' not in data['tags']: return tagset = set(data['tags']) if tagset & FR_TAGS: issue_type = 'enhancement' elif tagset & BUG_TAGS: issue_type = 'bug' else: return if cache_get('issue-post:%s' % data['post_id']): return text = template('report.md', **data) args = { 'kind': issue_type, 'title': data['text'][:100], 'content': text, } bb = Bitbucket(settings.api_login, settings.api_password, settings.api_slug) issue = Issue(bb) status, resp = issue.create(**args) try: env.user = User('login', 'support') except UserNotFound: return reply = template('reply.txt', issue=resp['local_id']) try: add_comment(data['post_id'], None, text=reply, dont_subscribe=True, force=True) except (PostError, CommentError), e: log.error(e) return
def xhtmlim(environment, s):
    """Markdown-render ``s`` for XHTML-IM output, caching by content hash.

    When ``settings.cache_markdown`` is on, the rendered HTML is cached
    under an md5 of the source text for an hour.
    """
    if not s:
        return ''
    if settings.cache_markdown:
        digest = md5(s.encode('utf-8')).hexdigest()
        cached = cache_get('mdx:%s' % digest)
        if cached:
            return cached
    rendered = md.convert(s)
    if settings.cache_markdown:
        cache_store('mdx:%s' % digest, rendered, 3600)
    return rendered
def markdown(text, img=False):
    """Render ``text`` as HTML via markdown, caching by md5 of the source.

    With ``settings.cache_markdown`` enabled the result is stored for an
    hour under a hash-derived key.  Empty input yields ''.
    """
    if not text:
        return ''
    if settings.cache_markdown:
        digest = md5(text.encode('utf-8')).hexdigest()
        hit = cache_get('md:%s' % digest)
        if hit:
            return hit
    html = md.convert(text)
    if settings.cache_markdown:
        cache_store('md:%s' % digest, html, 3600)
    return html
def striphtml_filter(environment, s):
    """Strip HTML tags from ``s``, caching the result by content hash.

    Uses the same ``settings.cache_markdown`` switch/TTL discipline as
    the markdown filters; cached entries live for an hour.
    """
    if not s:
        return ''
    if settings.cache_markdown:
        digest = md5(s.encode('utf-8')).hexdigest()
        hit = cache_get('h2t:%s' % digest)
        if hit:
            return hit
    stripped = striphtml(s)
    if settings.cache_markdown:
        cache_store('h2t:%s' % digest, stripped, 3600)
    return stripped
def thumbnail(url): log.debug('- URL %s %s' % (type(url), url)) hash = md5(url).hexdigest() path = os.path.join(settings.thumbnail_path, hash[:2], hash) if os.path.isfile(path) and os.stat(path) > 0: log.debug('%s: thumbnail exists' % path) return log.info('Making thumbnail %s %s' % (path, url)) if cache_get('thumbnail:%s' % hash): return cache_store('thumbnail:%s' % hash, 1, 60) try: dirname = os.path.join(settings.thumbnail_path, hash[:2]) try: os.mkdir(dirname) except OSError, e: if e.errno == 17: log.debug('OSError %s: %s %s' % (e.errno, e.strerror, dirname)) else: log.warn('OSError %s: %s %s' % (e.errno, e.strerror, dirname)) opener = urllib2.build_opener() opener.addheaders = [('User-agent', 'Mozilla/5.0')] resp = opener.open(url) #resp = urllib2.urlopen(url) buf = StringIO(resp.read()) img = Image.open(buf) if img.size[0] * img.size[1] > settings.max_image_size: log.error('too big: %sx%s %s' % (img.size[0], img.size[1], url)) return img.load() fmt = img.format if fmt == 'JPEG': img = _rotate(img) elif fmt == 'GIF': img.seek(0) #img = img.copy() img.thumbnail(settings.thumbnail_size, Image.ANTIALIAS) img.save(path, fmt, **img.info)
def thumbnail(url): log.debug('- URL %s %s' % (type(url), url)) hash = md5(url).hexdigest() dirname = os.path.join(settings.thumbnail_path, hash[:2]) path = os.path.join(dirname, hash) if os.path.isfile(path) and os.stat(path) > 0: log.debug('%s: thumbnail exists' % path) return log.info('Making thumbnail %s %s' % (path, url)) if cache_get('thumbnail:%s' % hash): return cache_store('thumbnail:%s' % hash, 1, 60) try: try: os.mkdir(dirname) except OSError, e: if e.errno == 17: log.debug('OSError %s: %s %s' % (e.errno, e.strerror, dirname)) else: log.warn('OSError %s: %s %s' % (e.errno, e.strerror, dirname)) opener = urllib2.build_opener() opener.addheaders = [('User-agent', 'Mozilla/5.0')] resp = opener.open(url) #resp = urllib2.urlopen(url) buf = StringIO(resp.read()) img = Image.open(buf) if img.size[0] * img.size[1] > settings.max_image_size: log.error('too big: %sx%s %s' % (img.size[0], img.size[1], url)) return img.load() fmt = img.format if fmt == 'JPEG': img = _rotate(img) elif fmt == 'GIF': img.seek(0) #img = img.copy() img.thumbnail(settings.thumbnail_size, Image.ANTIALIAS) img.save(path, fmt, **img.info)
def get_url(self):
    """Return the feed url, memoized on the instance and cached by id.

    Raises FeedNotFound when no row exists for this feed id.
    """
    if self._url:
        return self._url
    if self.id:
        key = "feed_url:%s" % self.id
        self._url = cache_get(key)
    if not self._url:
        row = db.fetchone("SELECT url FROM users.feeds WHERE id=%s;",
                          [self.id])
        if not row:
            raise FeedNotFound
        self._url = row[0]
        if self.id:
            cache_store(key, self._url, 3600)
    return self._url
def __init__(self, field, value=None):
    """Construct a Feed.

    Accepts either an explicit url (field == 'url', or any string
    starting with http:// or https://), which is resolved to a feed
    user id via cache/users.feeds, or any (field, value) pair that
    User.__init__ understands.

    Raises InvalidFeedUrl for an unparseable url and FeedNotFound when
    no matching feed user exists.
    """
    self._url = None
    if field == 'url':
        self._url = value
    elif isinstance(field, (str, unicode)):
        if field.startswith('http://') or field.startswith('https://'):
            self._url = field
        elif not value:
            # a bare string that is not an url and nothing to look up by
            raise InvalidFeedUrl
    if self._url:
        if not parse_url(self._url, exact=True):
            raise InvalidFeedUrl
        key = 'url_feed_id:%s' % self._url
        id = cache_get(key)
        if not id:
            res = db.fetchone("SELECT id FROM users.feeds WHERE url=%s;",
                              [self._url])
            if res:
                id = res['id']
                cache_store(key, id)
        if not id:
            raise FeedNotFound
        try:
            User.__init__(self, long(id))
        except UserNotFound:
            raise FeedNotFound
    else:
        # fall through to the generic user lookup
        try:
            User.__init__(self, field, value)
        except UserNotFound:
            raise FeedNotFound
    self._posts = []
def tags(self, limit=None, sort_by_name=False, all=False):
    """Return this user's tags with usage counts.

    limit        -- maximum number of tags (None for all)
    sort_by_name -- alphabetical order instead of descending count
    all          -- bypass the cache read (the result is still stored)

    Returns a list of {'tag': ..., 'cnt': ...} dicts; anonymous users
    get [].
    """
    if not self.id:
        return []
    # BUG FIX: the key did not include the sort order, so a cached
    # count-sorted list could be returned for a name-sorted request.
    key = 'user_tags:%d:%s:%s' % (self.id, (limit or 'all'),
                                  'name' if sort_by_name else 'cnt')
    if not all:
        cached = cache_get(key)
        if cached:
            return cached
    order = 'tag ASC' if sort_by_name else 'cnt DESC'
    limit_sql = ("LIMIT %d" % limit) if limit else ''
    rows = db.fetchall(
        "SELECT tag, count(post_id) AS cnt "
        "FROM posts.tags WHERE user_id=%%s "
        "GROUP BY tag ORDER BY %s "
        "%s;" % (order, limit_sql), [self.id])
    # BUG FIX: plain dicts were cached but raw db rows returned, so the
    # cached and uncached paths yielded different element types.
    tags = [dict(t) for t in rows]
    cache_store(key, tags, 60)
    return tags
def tags(self, limit=None, sort_by_name=False, all=False):
    """List the user's tags together with how often each was used.

    limit        -- cap the number of tags returned (None for no cap)
    sort_by_name -- sort alphabetically rather than by descending count
    all          -- skip the cache read; the fresh result is still cached

    Returns a list of {'tag': ..., 'cnt': ...} dicts ([] for anonymous
    users).
    """
    if not self.id:
        return []
    # BUG FIX: the cache key ignored the sort order, so requests with
    # different sort_by_name values could collide in the cache.
    key = 'user_tags:%d:%s:%s' % (self.id, (limit or 'all'),
                                  'name' if sort_by_name else 'cnt')
    if not all:
        cached = cache_get(key)
        if cached:
            return cached
    order = 'tag ASC' if sort_by_name else 'cnt DESC'
    limit_sql = ("LIMIT %d" % limit) if limit else ''
    rows = db.fetchall("SELECT tag, count(post_id) AS cnt "
                       "FROM posts.tags WHERE user_id=%%s "
                       "GROUP BY tag ORDER BY %s "
                       "%s;" % (order, limit_sql), [self.id])
    # BUG FIX: previously raw rows were returned while plain dicts were
    # cached -- callers saw different types depending on the cache.
    tags = [dict(t) for t in rows]
    cache_store(key, tags, 60)
    return tags
def get_info(self, param=None):
    """Return the user's info dict (joined with privacy flags), or one field.

    Lazily populates ``self.info`` from the cache or the database;
    dates are kept in the cache as ISO strings.  The 'tune' entry is
    refreshed on every call.  Returns None when no info exists or the
    field is missing.
    """
    key = 'userinfo:%s' % self.id
    if not self.info:
        cached = cache_get(key)
        if cached:
            # restore datetimes from cached ISO strings
            for field in ('birthdate', 'created'):
                cached[field] = (dateutil.parser.parse(cached[field])
                                 if cached[field] else None)
            self.info = cached
        else:
            row = db.fetchone(
                "SELECT i.name,p.private, p.deny_anonymous,"
                "i.email, i.xmpp, i.icq, i.skype, i.about, "
                "i.avatar, i.gender, i.birthdate, i.location,"
                "i.homepage, i.created "
                "FROM users.info i "
                "LEFT OUTER JOIN users.profile p "
                "ON (i.id = p.id) "
                "WHERE i.id=%s;", [self.id])
            if row:
                self.info = dict(row)
                flat = dict(row)
                for field in ('birthdate', 'created'):
                    flat[field] = (flat[field].isoformat()
                                   if flat[field] else None)
                cache_store(key, flat)
    if not self.info:
        return None
    self.info['tune'] = self.get_tune()
    if not param:
        return self.info
    try:
        return self.info[param]
    except KeyError:
        return None
def get_info(self, param=None):
    """Fetch user info joined with privacy settings.

    Returns the whole info dict, or just ``param`` when given; None for
    a missing user or field.  ``self.info`` is loaded on demand from
    the cache (ISO date strings re-parsed) or the database (ISO date
    strings cached).  'tune' is recomputed every call.
    """
    if not self.info:
        cache_key = 'userinfo:%s' % self.id
        data = cache_get(cache_key)
        if data:
            for name in ('birthdate', 'created'):
                if data[name]:
                    data[name] = dateutil.parser.parse(data[name])
                else:
                    data[name] = None
            self.info = data
        else:
            data = db.fetchone("SELECT i.name,p.private, p.deny_anonymous,"
                               "i.email, i.xmpp, i.icq, i.skype, i.about, "
                               "i.avatar, i.gender, i.birthdate, i.location,"
                               "i.homepage, i.created "
                               "FROM users.info i "
                               "LEFT OUTER JOIN users.profile p "
                               "ON (i.id = p.id) "
                               "WHERE i.id=%s;", [self.id])
            if data:
                self.info = dict(data)
                snapshot = dict(data)
                for name in ('birthdate', 'created'):
                    snapshot[name] = (snapshot[name].isoformat()
                                      if snapshot[name] else None)
                cache_store(cache_key, snapshot)
    if not self.info:
        return None
    self.info['tune'] = self.get_tune()
    if param:
        try:
            return self.info[param]
        except KeyError:
            return None
    return self.info
def rename(self, login):
    """Change the user's login.

    Raises NotAuthorized for anonymous users, RenameError while the
    rename cooldown is active, UserLoginError for an invalid nickname
    and UserExists when the login is already taken.  All login-related
    cache entries are invalidated afterwards.
    """
    if not self.id:
        raise NotAuthorized
    if cache_get('renamed:%s' % self.id):
        raise RenameError
    if not validate_nickname(login):
        raise UserLoginError
    previous = self.login
    self.login = login
    try:
        db.perform("UPDATE users.logins SET login=%s WHERE id=%s;",
                   [login, self.id])
    except IntegrityError:
        raise UserExists
    # start the rename cooldown and drop stale lookup entries
    cache_store('renamed:%s' % self.id, 1, settings.user_rename_timeout)
    cache_del('id_login:%s' % previous.lower())
    for acc_type in ACCOUNT_TYPES:
        for addr in self.get_accounts(acc_type):
            cache_del("addr_id_login:%s" % addr)
def get_tune(self):
    """Return the user's currently playing tune from the cache, if any."""
    key = "user_tune:%s" % self.id
    return cache_get(key)
def is_renamed(self):
    """True while the user's rename cooldown marker is still cached.

    Raises NotAuthorized for anonymous users.
    """
    if not self.id:
        raise NotAuthorized
    marker = cache_get('renamed:%s' % self.id)
    return bool(marker)
def handle_subscription(self, presence):
    """Deliver a message that was queued pending this presence.

    The cache may hold send_message() kwargs keyed by presence type and
    bare JID; when found, the entry is consumed and the message sent.
    """
    key = 'presence:%s:%s' % (presence['type'], presence['from'].bare)
    pending = cache_get(key)
    if not pending:
        return
    cache_del(key)
    self.send_message(**pending)
def register():
    # Registration handler: GET renders the form, POST validates the
    # submission (anti-bot fields, login, password, captcha).
    # NOTE(review): this chunk appears truncated -- the collected
    # ``errors`` list is never consumed and no account is created below;
    # the rest of the function is presumably outside this view.
    #raise Forbidden
    if env.user.id:
        raise AlreadyAuthorized
    sess = Session()
    info = sess['reg_info'] or {}
    if env.request.method == 'GET':
        # drop any ulogin network linkage from a previous attempt
        try:
            del info['network']
            del info['uid']
        except (KeyError, TypeError):
            pass
        sess['reg_info'] = info
        sess['reg_start'] = timestamp(datetime.now())
        sess.save()
        try:
            info['birthdate'] = parse_date(info['birthdate']) \
                or datetime.now() - timedelta(days=365*16+4)
        except (KeyError, TypeError):
            info['birthdate'] = None
        return render('/auth/register.html', fields=ULOGIN_FIELDS,
                      info=info)
    # one successful registration per remote host
    if cache_get('reg-ok:%s' % env.request.remote_host):
        raise Forbidden
    # anti-bot form fields
    hi1 = env.request.args('hi1')
    try:
        hi2 = int(env.request.args('hi2', 0))
    except ValueError:
        hi2 = 0
    #try:
    #    h = hi2 / (timestamp(datetime.now()) - int(sess['reg_start']))
    #except:
    #    raise Forbidden
    #finally:
    #    pass
    if hi2 < 5:
        raise Forbidden
    try:
        network = info['network'] if 'network' in info else None
        uid = info['uid'] if 'uid' in info else None
    except TypeError:
        network = None
        uid = None
    errors = []
    for p in ['login', 'name', 'email', 'birthdate', 'location',
              'about', 'homepage']:
        info[p] = env.request.args(p, '').decode('utf-8')
    info['gender'] = _gender(env.request.args('gender'))
    login = env.request.args('login', '').strip()
    # honeypot check: hi1 must echo the login field
    if hi1 != login:
        raise Forbidden
    if login and validate_nickname(login):
        try:
            u = User('login', login)
            if u.id:
                errors.append('login-in-use')
        except UserNotFound:
            pass
    elif login:
        errors.append('login-invalid')
    else:
        errors.append('login-empty')
    password = env.request.args('password')
    confirm = env.request.args('confirm')
    # a password is only required for non-network (non-ulogin) signups
    if not (network and uid):
        if not password:
            errors.append('password')
        elif password != confirm:
            errors.append('confirm')
    info['birthdate'] = parse_date(info['birthdate']) \
        or datetime.now() - timedelta(days=365*16+4)
    if not network and not errors:
        try:
            text = env.request.args('recaptcha_response_field')
            challenge = env.request.args('recaptcha_challenge_field')
            resp = captcha.submit(challenge, text,
                                  settings.recaptcha_private_key,
                                  env.request.remote_host)
            if not resp.is_valid:
                errors.append('captcha')
        except urllib2.URLError, e:
            log.error('recaptcha fail: %s' % e)
            #errors.append('recaptcha-fail')
        except AddressNotFound:
            return Response(redirect='%s://%s/remember?fail=1' % \
                            (env.request.protocol, settings.domain))
pass from point.core.user import User from importers.juick import Juick from point.util import cache_get, cache_store importer_types = {"juick": Juick} try: login, itype, path = sys.argv[1:4] except (IndexError, ValueError): sys.stderr.write("Usage: %s <login> <type> <path>\n" % sys.argv[0]) exit(1) try: imp = importer_types[itype](User("login", login), path) except KeyError: sys.stderr.write("%s: invalid source type\n" % itype) exit(1) for ext_id, post, comments in imp.posts(): post.archive, post.author, post.text, post.tags key = "imported:%s:%s:%s" % (itype, login.lower(), ext_id) print key if cache_get(key): continue post.save() for comment in comments: comment.save() cache_store(key, 1, settings.ids_cache_expire)
from importers.juick import Juick from point.util import cache_get, cache_store importer_types = { "juick": Juick } try: login, itype, path = sys.argv[1:4] except (IndexError, ValueError): sys.stderr.write("Usage: %s <login> <type> <path>\n" % sys.argv[0]) exit(1) try: imp = importer_types[itype](User("login", login), path) except KeyError: sys.stderr.write("%s: invalid source type\n" % itype) exit(1) for ext_id, post, comments in imp.posts(): post.archive, post.author, post.text, post.tags key = "imported:%s:%s:%s" % (itype, login.lower(), ext_id) print key if cache_get(key): continue post.save() for comment in comments: comment.save() cache_store(key, 1, settings.ids_cache_expire)
def __init__(self, field, value=None):
    """Look up or construct a user.

    Accepted forms:
      * int/long id            -- resolve login by id
      * ('login', name)        -- resolve id by login (case-insensitive)
      * (account_type, addr)   -- resolve by account address
      * non-id field with a falsy value -- construct an empty user

    Lookups go through the cache first and fall back to the database.
    Raises UserNotFound when an id or login lookup fails.
    """
    self.id = None
    self.login = None
    self.accounts = []
    self.accounts_add = []
    self.accounts_del = []
    self.profile = {}
    self.profile_upd = {}
    self.info = {}
    self.info_upd = {}
    self.password = None
    self._private = None
    self.redis = RedisPool(settings.storage_socket)
    if isinstance(field, (int, long)):
        # lookup by numeric id
        self.id = field
        self.login = cache_get('login:%s' % field)
        if not self.login:
            res = db.fetchone("SELECT login FROM users.logins WHERE id=%s;",
                              [field])
            if not res:
                raise UserNotFound
            self.login = res[0]
            cache_store('login:%s' % field, self.login)
        return
    if not value:
        #raise UserNotFound
        # empty user
        return
    if field == 'login':
        r = cache_get('id_login:%s' % value.lower())
        if r:
            try:
                # current cache format: [id, login, type]
                self.id, self.login, self.type = r
            except ValueError:
                # older two-element cache entries lack the type
                self.id, self.login = r
                self.type = 'user'
        else:
            res = db.fetchone("SELECT id, login, type FROM users.logins "
                              "WHERE lower(login)=%s;",
                              [str(value).lower()])
            if not res:
                raise UserNotFound(value)
            self.id, self.login, self.type = res
            cache_store('id_login:%s' % value.lower(),
                        [res[0], res[1], res[2]])
        return
    # otherwise: lookup by account address of type ``field``
    r = cache_get('addr_id_login:%s' % value.lower())
    if r:
        self.id, self.login = r
    else:
        res = db.fetchone("SELECT u.id, u.login FROM users.accounts a "
                          "JOIN users.logins u ON u.id=a.user_id "
                          "WHERE a.type=%s AND a.address=%s;",
                          [field, value])  #, _cache=3600)
        if res:
            self.id, self.login = res
            cache_store('addr_id_login:%s' % value.lower(),
                        [res[0], res[1]])
def __init__(self, field, value=None):
    """Look up or construct a user.

    Accepted forms:
      * int/long id            -- resolve login by id
      * ('login', name)        -- resolve id by login (case-insensitive)
      * (account_type, addr)   -- resolve by account address
        (this variant lowercases the address in the SQL comparison)
      * non-id field with a falsy value -- construct an empty user

    Lookups go through the cache first and fall back to the database.
    Raises UserNotFound when an id or login lookup fails.
    """
    self.id = None
    self.login = None
    self.accounts = []
    self.accounts_add = []
    self.accounts_del = []
    self.profile = {}
    self.profile_upd = {}
    self.info = {}
    self.info_upd = {}
    self.password = None
    self._private = None
    self.redis = RedisPool(settings.storage_socket)
    if isinstance(field, (int, long)):
        # lookup by numeric id
        self.id = field
        self.login = cache_get('login:%s' % field)
        if not self.login:
            res = db.fetchone(
                "SELECT login FROM users.logins WHERE id=%s;", [field])
            if not res:
                raise UserNotFound
            self.login = res[0]
            cache_store('login:%s' % field, self.login)
        return
    if not value:
        #raise UserNotFound
        # empty user
        return
    if field == 'login':
        r = cache_get('id_login:%s' % value.lower())
        if r:
            try:
                # current cache format: [id, login, type]
                self.id, self.login, self.type = r
            except ValueError:
                # older two-element cache entries lack the type
                self.id, self.login = r
                self.type = 'user'
        else:
            res = db.fetchone(
                "SELECT id, login, type FROM users.logins "
                "WHERE lower(login)=%s;", [str(value).lower()])
            if not res:
                raise UserNotFound(value)
            self.id, self.login, self.type = res
            cache_store('id_login:%s' % value.lower(),
                        [res[0], res[1], res[2]])
        return
    # otherwise: lookup by account address of type ``field``
    r = cache_get('addr_id_login:%s' % value.lower())
    if r:
        self.id, self.login = r
    else:
        res = db.fetchone("SELECT u.id, u.login FROM users.accounts a "
                          "JOIN users.logins u ON u.id=a.user_id "
                          "WHERE a.type=%s AND lower(a.address)=%s;",
                          [field, value.lower()])  #, _cache=3600)
        if res:
            self.id, self.login = res
            cache_store('addr_id_login:%s' % value.lower(),
                        [res[0], res[1]])
def register():
    # Registration handler: GET renders the form, POST validates the
    # submission (anti-bot fields, login, password, captcha).
    # NOTE(review): this chunk appears truncated -- the collected
    # ``errors`` list is never consumed and no account is created below;
    # the rest of the function is presumably outside this view.
    #raise Forbidden
    if env.user.id:
        raise AlreadyAuthorized
    sess = Session()
    info = sess['reg_info'] or {}
    if env.request.method == 'GET':
        # drop any ulogin network linkage from a previous attempt
        try:
            del info['network']
            del info['uid']
        except (KeyError, TypeError):
            pass
        sess['reg_info'] = info
        sess['reg_start'] = timestamp(datetime.now())
        sess.save()
        try:
            info['birthdate'] = parse_date(info['birthdate']) \
                or datetime.now() - timedelta(days=365*16+4)
        except (KeyError, TypeError):
            info['birthdate'] = None
        return render('/auth/register.html', fields=ULOGIN_FIELDS,
                      info=info)
    # one successful registration per remote host
    if cache_get('reg-ok:%s' % env.request.remote_host):
        raise Forbidden
    # anti-bot form fields
    hi1 = env.request.args('hi1')
    try:
        hi2 = int(env.request.args('hi2', 0))
    except ValueError:
        hi2 = 0
    #try:
    #    h = hi2 / (timestamp(datetime.now()) - int(sess['reg_start']))
    #except:
    #    raise Forbidden
    #finally:
    #    pass
    if hi2 < 5:
        raise Forbidden
    try:
        network = info['network'] if 'network' in info else None
        uid = info['uid'] if 'uid' in info else None
    except TypeError:
        network = None
        uid = None
    errors = []
    for p in ['login', 'name', 'email', 'birthdate', 'location',
              'about', 'homepage']:
        info[p] = env.request.args(p, '').decode('utf-8')
    info['gender'] = _gender(env.request.args('gender'))
    login = env.request.args('login', '').strip()
    # honeypot check: hi1 must echo the login field
    if hi1 != login:
        raise Forbidden
    if login and validate_nickname(login):
        try:
            u = User('login', login)
            if u.id:
                errors.append('login-in-use')
        except UserNotFound:
            pass
    elif login:
        errors.append('login-invalid')
    else:
        errors.append('login-empty')
    password = env.request.args('password')
    confirm = env.request.args('confirm')
    # a password is only required for non-network (non-ulogin) signups
    if not (network and uid):
        if not password:
            errors.append('password')
        elif password != confirm:
            errors.append('confirm')
    info['birthdate'] = parse_date(info['birthdate']) \
        or datetime.now() - timedelta(days=365*16+4)
    if not network and not errors:
        try:
            text = env.request.args('recaptcha_response_field')
            challenge = env.request.args('recaptcha_challenge_field')
            resp = captcha.submit(challenge, text,
                                  settings.recaptcha_private_key,
                                  env.request.remote_host)
            if not resp.is_valid:
                errors.append('captcha')
        except urllib2.URLError, e:
            log.error('recaptcha fail: %s' % e)
            #errors.append('recaptcha-fail')
        except AddressNotFound:
            return Response(redirect='%s://%s/remember?fail=1' % \
                            (env.request.protocol, settings.domain))