def xmpp_template(tmpl_name, _lang=None, _type=None, **context):
    if not _lang:
        if env.user and env.user.id:
            _lang = env.user.get_profile('lang')
        else:
            _lang = settings.lang

    if not _type:
        try:
            if env.user and env.user.get_profile('xhtml'):
                _type = 'html'
        except KeyError:
            pass

    tmpl_dict = {}

    if _type == 'html':
        try:
            tmpl_path = os.path.join(_lang, 'xhtml', tmpl_name + '.tmpl')
            tmpl = jinja_env.get_template(tmpl_path)
            log.debug('Template %s' % tmpl_path)
            tmpl_dict['html'] = tmpl.render(context, settings=settings)
        except TemplateNotFound:
            tmpl_dict['html'] = None
            log.error('Template %s not found' % tmpl_path)

    tmpl_path = os.path.join(_lang, 'text', tmpl_name + '.tmpl')
    tmpl = jinja_env.get_template(tmpl_path)
    tmpl_dict['body'] = tmpl.render(context, settings=settings)

    return tmpl_dict
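A minimal usage sketch for xmpp_template: the 'reg_ok' template and login key appear in reg_invite_set_login below; send_message and the target JID are assumptions, not from this codebase.

# Hypothetical usage sketch; send_message and the JID are assumed.
reply = xmpp_template('reg_ok', _lang='en', login='alice')
send_message('alice@example.com', body=reply['body'], html=reply.get('html'))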
def confirm_account(self, code):
    res = db.fetchone(
        "SELECT id, user_id, type, address, code "
        "FROM users.accounts_unconfirmed "
        "WHERE code=%s;", [code.lower()])
    if not res:
        return False
    if res['user_id'] != self.id or res['code'] != str(code).lower():
        return False

    db.perform("DELETE FROM users.accounts_unconfirmed WHERE id=%s;",
               [res['id']])
    try:
        db.perform(
            "INSERT INTO users.accounts (user_id, type, address) "
            "VALUES (%s, %s, %s);",
            [self.id, res['type'], res['address']])
    except IntegrityError:
        log.error("%s %s already exists" % (res['type'], res['address']))
        return False

    cache_del("addr_id_login:%s" % res['address'])
    return True
def thumb(name, size, orig=False):
    # src and dest come from the enclosing _attach_image() scope (see below).
    fd = open(src)
    img = Image.open(fd)
    if img.size[0] * img.size[1] > settings.max_image_size:
        log.error('too big: %sx%s %s' % (img.size[0], img.size[1], src))
        return
    img.load()
    fmt = img.format
    if fmt == 'JPEG':
        img = _rotate(img)
    elif fmt == 'GIF':
        img.seek(0)

    if orig:
        (w, h) = img.size
        if w <= settings.media_size[0] and h <= settings.media_size[1]:
            shutil.copyfile(src, os.path.join(dest, name))
            return

    img.thumbnail(size, Image.ANTIALIAS)
    img.save(os.path.join(dest, name), fmt, **img.info)
def fetch(self):
    if self._posts:
        return
    if not self.get_url():
        raise InvalidFeedUrl

    log.info('Feed #%s fetch from %s' % (self.id, self.get_url()))

    proc = process(self.get_url())
    if proc.get_error():
        log.error("Feed #%s fetch error: %s %s" % \
                  (self.id, self.get_url(), proc.get_error()))

    if not proc.entries():
        #if 'status' in d and d['status'] < 400:
        #    raise InvalidFeedType
        #else:
        raise FeedFetchError

    info = proc.get_info()
    for param in ['name', 'about', 'homepage']:
        if info[param] and info[param] != self.get_info(param):
            self.set_info(param, info[param])
    if self.id and self.info_changed():
        self.save()

    self._posts = [Post(None, self, **p) for p in proc.entries()]
    log.info("Feed #%s: %s entries fetched from %s" % \
             (self.id, len(self._posts), self.get_url()))
def reg_invite_set_login(value):
    sess = env.user.session()
    if not sess.data():
        env.user.session(reg_invite_set_login, key=value)
        return 'Please enter your nickname'

    if not sess['key']:
        env.user.session_destroy()
        return 'Fail'

    env.user.session_destroy()

    if value.startswith('@'):
        value = value[1:]

    if not validate_nickname(value):
        return xmpp_template('reg_invalid')

    try:
        users.register(login=value, accounts=[('xmpp', env.jid)])
        redis = RedisPool(settings.storage_socket)
        redis.delete('invite:%s' % sess['key'])
        return xmpp_template('reg_ok', login=value)
    except UserExists:
        return 'User @%s already exists.' % value
    except UserError, e:
        log.error('%s: %s' % (e.__class__.__name__, e.message))
        return e.message
def listen_queue(self):
    try:
        data = self.xout.pop()
        if data:
            data = json.loads(data)
    except Exception, e:
        log.error('%s %s %s %s' % (e.__class__.__name__, e.message,
                                   type(data), data))
        data = None
def __init__(self):
    self.queue = Queue('imgq', settings.imgproc_socket)
    log.info('imgproc worker started')

    while True:
        data = self.queue.pop()
        if data and isinstance(data, dict):
            fn = data['fn']
            del data['fn']
            try:
                handlers[fn](**data)
            except Exception, e:
                log.error(traceback.format_exc())
def mail(to, body, subject='', template=None, html=False, \
         attachments=None, **context):
    """
    E-mail sender.
    Required settings: smtp_host, smtp_port, smtp_from, smtp_auth_required.
    If smtp_auth_required is set to true, you should also define
    smtp_login and smtp_password.
    """
    smtp = smtplib.SMTP(settings.smtp_host, port=settings.smtp_port)
    smtp.ehlo()
    if settings.smtp_auth_required:
        smtp.starttls()
        smtp.ehlo()
        smtp.login(settings.smtp_login, settings.smtp_password)
    else:
        smtp.ehlo()

    if attachments:
        msg = MIMEMultipart()
        for path in attachments:
            try:
                fp = open(path, 'rb')
                data = fp.read()
                fp.close()
            except IOError, e:
                log.error('Attach %s: %s' % (path, e.message))
                continue

            ctype, encoding = mimetypes.guess_type(path)
            if ctype is None or encoding is not None:
                ctype = 'application/octet-stream'
            maintype, subtype = ctype.split('/', 1)

            if maintype == 'text':
                part = MIMEText(data)
            elif maintype == 'image':
                part = MIMEImage(data)
            elif maintype == 'audio':
                part = MIMEAudio(data)
            else:
                part = MIMEBase(maintype, subtype)
                part.set_payload(data)

            part.add_header('Content-Disposition', 'attachment',
                            filename=os.path.basename(path))
            msg.attach(part)
    # (continued in the fragment below: html/plain-text branches and sendmail)
def handle_post(self, data):
    """Handle post
    """
    if data['private']:
        return
    if len(data['tags']) == 0 or 'point' not in data['tags']:
        return

    tagset = set(data['tags'])
    if tagset & FR_TAGS:
        issue_type = 'enhancement'
    elif tagset & BUG_TAGS:
        issue_type = 'bug'
    else:
        return

    if cache_get('issue-post:%s' % data['post_id']):
        return

    text = template('report.md', **data)
    args = {
        'kind': issue_type,
        'title': data['text'][:100],
        'content': text,
    }

    bb = Bitbucket(settings.api_login, settings.api_password,
                   settings.api_slug)
    issue = Issue(bb)
    status, resp = issue.create(**args)

    try:
        env.user = User('login', 'support')
    except UserNotFound:
        return

    reply = template('reply.txt', issue=resp['local_id'])
    try:
        add_comment(data['post_id'], None, text=reply,
                    dont_subscribe=True, force=True)
    except (PostError, CommentError), e:
        log.error(e)
        return
def thumbnail(url):
    log.debug('- URL %s %s' % (type(url), url))
    hash = md5(url).hexdigest()
    path = os.path.join(settings.thumbnail_path, hash[:2], hash)

    if os.path.isfile(path) and os.stat(path) > 0:
        log.debug('%s: thumbnail exists' % path)
        return

    log.info('Making thumbnail %s %s' % (path, url))

    if cache_get('thumbnail:%s' % hash):
        return
    cache_store('thumbnail:%s' % hash, 1, 60)

    # (the except/finally handlers for this try follow in a later fragment below)
    try:
        dirname = os.path.join(settings.thumbnail_path, hash[:2])
        try:
            os.mkdir(dirname)
        except OSError, e:
            if e.errno == 17:
                log.debug('OSError %s: %s %s' % (e.errno, e.strerror, dirname))
            else:
                log.warn('OSError %s: %s %s' % (e.errno, e.strerror, dirname))

        opener = urllib2.build_opener()
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        resp = opener.open(url)
        #resp = urllib2.urlopen(url)

        buf = StringIO(resp.read())
        img = Image.open(buf)

        if img.size[0] * img.size[1] > settings.max_image_size:
            log.error('too big: %sx%s %s' % (img.size[0], img.size[1], url))
            return

        img.load()
        fmt = img.format
        if fmt == 'JPEG':
            img = _rotate(img)
        elif fmt == 'GIF':
            img.seek(0)
            #img = img.copy()

        img.thumbnail(settings.thumbnail_size, Image.ANTIALIAS)
        img.save(path, fmt, **img.info)
def _fn(*args, **kwargs):
    try:
        try:
            return fn(*args, **kwargs)
        except Exception as e:
            if settings.debug:
                log.error(traceback.format_exc())
            raise e
    except UserNotFound:
        body = render_string('/user-not-found.html')
        return Response(body, code=NotFound.code, message=NotFound.message)
    except PostAuthorError:
        body = render_string('/blog-denied.html')
        return Response(body, code=Forbidden.code, message=Forbidden.message)
    except SubscribeError:
        body = render_string('/post-denied.html')
        return Response(body, code=Forbidden.code, message=Forbidden.message)
    except PostReadonlyError:
        body = render_string('/post-readonly.html')
        return Response(body, code=Forbidden.code, message=Forbidden.message)
    except PostNotFound:
        body = render_string('/post-not-found.html')
        return Response(body, code=NotFound.code, message=NotFound.message)
    except CommentNotFound:
        body = render_string('/comment-not-found.html')
        return Response(body, code=NotFound.code, message=NotFound.message)
    except CommentEditingForbiddenError:
        body = render_string('/comment-past-editing.html')
        return Response(body, code=Forbidden.code, message=Forbidden.message)
    except NotAuthorized:
        raise Forbidden
    except AlreadyAuthorized:
        raise Forbidden
    except PostLimitError:
        body = render_string('/post-interval-exceeded.html')
        return Response(body, code=Forbidden.code, message=Forbidden.message)
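The wrapper above reads like the inner function of an error-handling decorator; a minimal sketch of how such a wrapper is typically produced and applied (the catch_errors name and show_post handler are assumptions, not from this codebase):

def catch_errors(fn):
    # Hypothetical outer decorator; only the _fn body above comes from the source.
    def _fn(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except UserNotFound:
            return Response(render_string('/user-not-found.html'),
                            code=NotFound.code, message=NotFound.message)
        # ... remaining except clauses as in the wrapper above ...
    return _fn

@catch_errors
def show_post(post_id):
    pass  # hypothetical view function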
def posts(self, limit=None):
    posts = []
    lp = self.last_published()
    for p in self._posts:
        try:
            if self.id and lp and p.created <= lp:
                continue
        except TypeError:
            log.error('-- created "%s" <> lp "%s"' % (p.created, lp))
            raise FeedFetchError
        posts.append(p)

    posts.sort(lambda a, b: int(timestamp(a.created) - timestamp(b.created)))

    if limit is not None:
        return posts[-abs(int(limit)):]
    return posts
def delete(self):
    res = db.fetchone(
        "DELETE FROM posts.comments "
        "WHERE post_id=%s AND comment_id=%s "
        "RETURNING files;",
        [unb26(self.post.id), self.id],
    )
    if res and res["files"]:
        for f in res["files"]:
            remove_attach(f)

    redis = RedisPool(settings.storage_socket)
    redis.decr("cmnt_cnt.%s" % unb26(self.post.id))

    try:
        es = elasticsearch.Elasticsearch()
        try:
            es.delete(index="point-comments", doc_type="post",
                      id="%s-%s" % (self.post.id, self.id))
        except elasticsearch.exceptions.NotFoundError:
            pass
    except elasticsearch.ConnectionError, e:
        log.error("Elasticsearch: %s" % e)
def delete(self): res = db.fetchone("DELETE FROM posts.comments " "WHERE post_id=%s AND comment_id=%s " "RETURNING files;", [unb26(self.post.id), self.id]) if res and res['files']: for f in res['files']: remove_attach(f) redis = RedisPool(settings.storage_socket) redis.decr('cmnt_cnt.%s' % unb26(self.post.id)) try: es = elasticsearch.Elasticsearch() try: es.delete(index='point-comments', doc_type='post', id='%s-%s' % (self.post.id, self.id)) except elasticsearch.exceptions.NotFoundError: pass except elasticsearch.ConnectionError, e: log.error('Elasticsearch: %s' % e)
def update_feed(self, id):
    try:
        feed = Feed(id)
    except FeedNotFound:
        log.error('Feed #%s does not exist. Skipped.' % id)
        return

    redis = RedisPool(settings.storage_socket)

    try:
        feed.fetch()
        redis.delete('feed:retries:%s' % feed.id)

        for p in feed.posts():
            if not p.id:
                if p.tags:
                    p.tags.insert(0, 'news')
                else:
                    p.tags = ['news']
                add_post(p)

        log.info('Feed #%s: %s new entries saved' % \
                 (feed.id, len(feed.posts())))
        feed.update_task()

    except FeedFetchError:
        retries = redis.incr('feed:retries:%s' % feed.id)
        log.error('Feed #%s: %s retries failed' % (feed.id, retries))
        if retries > settings.feed_retries:
            redis.delete('feed:retries:%s' % feed.id)
            return
        timeout = settings.feed_retry_timeout * retries
        feed.update_at(datetime.now() + timedelta(seconds=timeout))

    except InvalidFeedType:
        redis.delete('feed:retries:%s' % feed.id)
        feed.update_at(datetime.now() + \
                       timedelta(seconds=settings.feed_max_update_timeout))
def __init__(self): proctitle("worker") log.info("worker started with PID=%s" % os.getpid()) self.route = prepare_route(route) self.qin = Queue("xin", settings.queue_socket) self.qout = Queue("xout", settings.queue_socket) while True: data = self.qin.pop() if data: try: data = json.loads(data) data_type = data.get("type", "msg") if data_type == "msg": method = self.handle_message elif data_type == "tune": method = self.handle_tune gevent.spawn(method, data) except ValueError, err: log.error("%s: %s" % (err.__class__.__name__, err.message))
def save(self):
    if not self.login:
        raise UserError("Cannot save anonymous user")

    is_new = False

    # create user
    if not self.id:
        if not self.login or not validate_nickname(self.login):
            raise UserError('Invalid Login: "%s"' % self.login)

        self.id = db.fetchone(
            "INSERT INTO users.logins (login, type) "
            "VALUES (%s, %s) RETURNING id;",
            [self.login, self.type])[0]

        db.perform("INSERT INTO users.info (id, name) VALUES (%s, %s);",
                   [self.id, self.login])
        db.perform(
            "INSERT INTO users.profile (id, private, lang) "
            "VALUES (%s, false, 'en');", [self.id])

        self.accounts_add = self.accounts
        is_new = True

    if not is_new:
        try:
            if self._private == True:
                self._set_private()
            elif self._private == False:
                self._set_public()
        except AttributeError:
            pass

    # save accounts
    for acc in self.accounts_add:
        try:
            if len(acc) == 3:
                db.perform(
                    "INSERT INTO users.accounts_unconfirmed "
                    "(user_id, type, address, code) "
                    "VALUES (%s, %s, %s, %s);",
                    [self.id, acc[0], acc[1], acc[2]])
            else:
                db.perform(
                    "INSERT INTO users.accounts "
                    "(user_id, type, address) "
                    "VALUES (%s, %s, %s);",
                    [self.id, acc[0], acc[1]])
        except IntegrityError:
            log.error("%s %s already exists" % (acc[0], acc[1]))
    self.accounts_add = []

    for type, address in self.accounts_del:
        db.perform(
            "DELETE FROM users.accounts WHERE "
            "user_id=%s AND type=%s AND address=%s;",
            [self.id, type, address])
        db.perform(
            "DELETE FROM users.accounts_unconfirmed WHERE "
            "user_id=%s AND type=%s AND address=%s;",
            [self.id, type, address])
    self.accounts_del = []

    # save profile
    if self.profile_upd:
        for table in self.profile_upd:
            f = []
            for k in self.profile_upd[table]:
                f.append("%s=%%(%s)s" % (k, k))
            try:
                try:
                    db.perform("INSERT INTO %s (id) VALUES (%%s);" % \
                               table, [self.id])
                except IntegrityError:
                    pass
                db.perform("UPDATE %s SET %s WHERE id=%s;" % \
                           (table, ','.join(f), self.id),
                           self.profile_upd[table])
                cache_del('profile:%s:%s' % (table, self.id))
            except ProgrammingError:
                raise KeyError
            except DataError:
                raise ValueError
        self.profile_upd = {}

    if self.info_upd:
        f = []
        for k in self.info_upd:
            #if not self.info_upd[k]:
            #    self.info_upd[k] = None
            f.append("%s=%%(%s)s" % (k, k))
        db.perform("UPDATE users.info SET %s WHERE id=%s;" % \
                   (','.join(f), self.id), self.info_upd)
        self.info_upd = {}
        cache_del('userinfo:%s' % self.id)

    if self.password:
        db.perform("UPDATE users.logins SET password=%s WHERE id=%s;",
                   (self.password, self.id))
        # fragment: tail of an error-template render call and its fallback handlers
                                      '/%d.html' % code,
                                      '/50x.html',
                                      'geweb/50x.html'], error=e)
        except TemplateNotFound, e:
            response = 'No error template found'

    except Exception, e:
        code = InternalServerError.code
        message = InternalServerError.message

        trace = traceback.format_exc()
        tb = inspect.trace()[-1][0]

        if isinstance(trace, str):
            trace = trace.decode('utf-8')
        log.error("%s: %s" % (code, trace))

        subject = 'Error at %s: %s' % (settings.domain, e.__class__.__name__)
        body = render('geweb/report.html', code=code, message=message,
                      protocol=env.request.protocol,
                      host=env.request.host,
                      uri=env.request.uri,
                      method=env.request.method,
                      params=env.request.args().iteritems(),
                      headers=env.request.headers(),
                      globals=tb.f_globals.iteritems(),
                      locals=tb.f_locals.iteritems(),
                      exception=e, trace=trace)

        if settings.debug:
            response = Response(body, code=code, message=message)
        else:
def __init__(self, environ):
    self._headers = { h[5:].lower().replace('_', '-'): val \
                      for h, val in environ.iteritems() \
                      if h.startswith('HTTP_') }

    self.protocol = self.header('x-forwarded-proto') or \
                    environ['wsgi.url_scheme'] or 'http'
    self.host = environ['HTTP_HOST']
    self.method = environ['REQUEST_METHOD'].upper()
    self.path = environ['PATH_INFO']
    self.remote_host = self.header('X-Forwarded-For') or \
                       environ['REMOTE_ADDR']
    try:
        self.remote_port = int(environ['REMOTE_PORT'])
    except (TypeError, ValueError):
        self.remote_port = None
    self.user_agent = environ['HTTP_USER_AGENT']
    self.referer = environ['HTTP_REFERER'] if 'HTTP_REFERER' in environ else ''
    self.is_xhr = self.header('X-Requested-With') == 'XMLHttpRequest'

    self._args = {}
    self._files = {}

    self.query_string = environ['QUERY_STRING']
    self.uri = '%s?%s' % (self.path, self.query_string) \
               if self.query_string else self.path

    if self.method in ('GET', 'HEAD'):
        self._args = urlparse.parse_qs(environ['QUERY_STRING'])

    elif self.method in ('POST', 'PUT', 'DELETE'):
        ctype = self.header('Content-Type')
        if not ctype or ctype.startswith('application/x-www-form-urlencoded'):
            _buf = environ['wsgi.input'].read()
            self._args = urlparse.parse_qs(_buf)

        elif ctype.startswith('multipart/form-data'):
            form = FieldStorage(fp=environ['wsgi.input'], environ=environ,
                                keep_blank_values=True)
            for field in form.list:
                try:
                    if field.filename:
                        pos = field.filename.rfind('/')
                        if pos == -1:
                            pos = field.filename.rfind('\\')
                        filename = field.filename[pos+1:]

                        try:
                            if not isinstance(self._args[field.name],
                                              (list, tuple)):
                                self._args[field.name] = \
                                    [self._args[field.name]]
                            self._args[field.name].append(filename)
                        except KeyError:
                            self._args[field.name] = filename

                        tmpfile = md5("%s%s" % (filename,
                                      datetime.now().isoformat())).hexdigest()
                        try:
                            upload_dir = settings.upload_dir
                        except AttributeError:
                            upload_dir = '/tmp'  # FIXME: get from environment
                        tmpfile_path = os.path.join(upload_dir, tmpfile)

                        try:
                            if not isinstance(self._files[field.name],
                                              (list, tuple)):
                                self._files[field.name] = \
                                    [self._files[field.name]]
                            self._files[field.name].append(tmpfile_path)
                        except KeyError:
                            self._files[field.name] = tmpfile_path

                        fd = open(tmpfile_path, 'w')
                        while True:
                            b = field.file.read(4096)
                            if b == '':
                                break
                            fd.write(b)
                        fd.close()
                        log.info('Upload %s: %s' % (field.name,
                                                    field.filename))
                    else:
                        if not field.value:
                            continue
                        try:
                            if not isinstance(self._args[field.name],
                                              (list, tuple)):
                                self._args[field.name] = \
                                    [self._args[field.name]]
                            self._args[field.name].append(field.value)
                        except KeyError:
                            self._args[field.name] = field.value
                except IOError, e:
                    log.error('Cannot write %s: %s' % \
                              (self._files[field.name], e.strerror))
            del form
def destroy(self):
    try:
        os.unlink(self.filename)
    except OSError, e:
        log.error('unlink %s: %s' % (self.filename, e.strerror))
            # fragment: continuation of mail() above
            elif maintype == 'audio':
                part = MIMEAudio(data)
            else:
                part = MIMEBase(maintype, subtype)
                part.set_payload(data)

            part.add_header('Content-Disposition', 'attachment',
                            filename=os.path.basename(path))
            msg.attach(part)

    elif html:
        if isinstance(body, unicode):
            body = body.encode('utf-8')
        msg = MIMEMultipart('alternative')
        msg.attach(MIMEText(body, 'html', 'utf-8'))
    else:
        msg = MIMEText(body)

    msg['Subject'] = subject
    msg['From'] = settings.smtp_from
    msg['To'] = to

    result = smtp.sendmail(settings.smtp_from, to, msg.as_string())
    if result:
        log.error('SMTP: %s' % result)

    smtp.quit()
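The mail() docstring above names the SMTP settings the sender expects; a minimal sketch of such a settings module (attribute names come from the docstring, values are placeholders):

# settings.py -- placeholder values; only the attribute names are taken from
# the mail() docstring, the values here are assumptions.
smtp_host = 'localhost'
smtp_port = 25
smtp_from = 'noreply@example.com'
smtp_auth_required = False
# needed only when smtp_auth_required is True:
smtp_login = 'mailer'
smtp_password = 'secret'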
        # fragment: image download/crop, continuing an earlier try block
        tmp_path = os.path.join(settings.upload_dir,
                                "%s.%s" % (filename, randint(1000, 9999)))
        fd = open(tmp_path, 'w')
        fd.write(resp.read())
        fd.close()
        resp.close()
        path = tmp_path
    except (urllib2.URLError, OSError), e:
        return {'status': 'fail', 'message': e.msg}

    source = Image.open(path)
    if source.size[0] * source.size[1] > settings.max_image_size:
        log.error('too big: %sx%s %s' % (source.size[0], source.size[1], path))
        return
    if source.format == 'GIF':
        source.seek(0)
    log.debug('%s opened: %s' % (path, source))

    w, h = source.size
    if w > h:
        box = ((w-h)/2, 0, (w+h)/2, h)
    elif w < h:
        box = (0, (h-w)/2, w, (h+w)/2)
    else:
        box = None
def save(self):
    if self.tags:
        self.tags = [ t[:64] for t in filter(None, self.tags)[:10] ]

    try:
        if not isinstance(self.files, (list, tuple)):
            self.files = None
    except AttributeError:
        self.files = None

    if self.id:
        db.perform("DELETE FROM posts.tags WHERE post_id=%s;",
                   [unb26(self.id)])
        if self.tags:
            for t in self.tags:
                if isinstance(t, str):
                    t = t.decode('utf-8')
                db.perform("INSERT INTO posts.tags "
                           "(post_id, user_id, tag) VALUES (%s, %s, %s);",
                           [unb26(self.id), self.author.id, t])

        db.perform("UPDATE posts.posts SET tags=%s, private=%s,"
                   "text=%s, edited=%s, archive=%s, pinned=%s, files=%s "
                   "WHERE id=%s;",
                   [self.tags, bool(self.private), self.text, self.edited,
                    self.archive, self.pinned, self.files, unb26(self.id)])
    else:
        if not self.created:
            self.created = datetime.now()
        res = db.fetchone("INSERT INTO posts.posts "
                          "(author, type, private, tags, title, link, text, "
                          "created, edited, archive, pinned, tune, files) "
                          "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, "
                          "%s, %s, %s) "
                          "RETURNING id;",
                          [self.author.id, self.type, bool(self.private),
                           self.tags, self.title, self.link, self.text,
                           self.created, self.edited, self.archive,
                           self.pinned, Json(self.tune), self.files])
        if not res:
            raise PostError
        self.id = b26(res[0])

        if self.tags:
            for t in self.tags:
                try:
                    db.perform("INSERT INTO posts.tags "
                               "(post_id, user_id, tag) "
                               "VALUES (%s, %s, %s);",
                               [unb26(self.id), self.author.id, t])
                except IntegrityError:
                    pass

    try:
        es = elasticsearch.Elasticsearch()
        es.index(index='point-posts', id=self.id, doc_type='post',
                 body={
                     'post_id': self.id,
                     'post_type': self.type,
                     'created': self.created,
                     'private': self.private,
                     'user_id': self.author.id,
                     'login': self.author.login,
                     'title': self.title,
                     'tags': self.tags,
                     'text': self.text,
                 })
    except elasticsearch.ConnectionError, e:
        log.error('Elasticsearch: %s' % e)
        # fragment: exception handlers for the thumbnail() try block above,
        # followed by the beginning of _attach_image()
        img.thumbnail(settings.thumbnail_size, Image.ANTIALIAS)
        img.save(path, fmt, **img.info)
    #except IOError, e:
    #    log.error('IOError %s' % str(e))
    #    fd = open(path, 'w')
    #    fd.close()
    except urllib2.HTTPError, e:
        log.warn('HTTPError %s: %s' % (e.code, url))
        fd = open(path, 'w')
        fd.close()
    except urllib2.URLError, e:
        log.warn('URLError %s: %s' % (e.reason, url))
    except Exception, e:
        log.error(traceback.format_exc())
    finally:
        cache_del('thumbnail:%s' % hash)


def _attach_image(src, dest, filename):
    def thumb(name, size, orig=False):
        fd = open(src)
        img = Image.open(fd)
        img.load()
        fmt = img.format
        if fmt == 'JPEG':
            img = _rotate(img)
        elif fmt == 'GIF':
def save(self, update=False):
    if not self.post.id:
        raise PostNotFound

    if isinstance(self.author, AnonymousUser):
        anon_login = self.author.login
    else:
        anon_login = None

    if not self.created:
        self.created = datetime.now()

    if isinstance(self.text, str):
        self.text = self.text.decode('utf-8', 'ignore')

    if update:
        res = db.perform("""
            UPDATE posts.comments
            SET (text, updated) = (%s, now())
            WHERE post_id = %s AND comment_id = %s;
            """,
            [self.text,
             unb26(self.post.id) if isinstance(self.post.id, basestring)
                                 else self.post.id,
             self.id])
        comment_id = self.id
    else:
        if self.archive and self.id:
            comment_id = self.id
            res = db.fetchone("INSERT INTO posts.comments "
                              "(post_id, comment_id, author, created,"
                              "to_comment_id, anon_login, text, files) "
                              "VALUES (%s, %s, %s, %s, %s, %s, %s, %s) "
                              "RETURNING comment_id;",
                              [unb26(self.post.id), self.id, self.author.id,
                               self.created, self.to_comment_id, anon_login,
                               self.text, self.files])
        else:
            redis = RedisPool(settings.storage_socket)
            while True:
                try:
                    comment_id = redis.incr('cmnt.%s' % self.post.id)
                    res = db.fetchone("INSERT INTO posts.comments "
                                      "(post_id, comment_id, author, created,"
                                      "to_comment_id, anon_login, text, files) "
                                      "VALUES (%s, %s, %s, %s, %s, %s, %s, %s) "
                                      "RETURNING comment_id;",
                                      [unb26(self.post.id), comment_id,
                                       self.author.id, self.created,
                                       self.to_comment_id, anon_login,
                                       self.text, self.files])
                    break
                except IntegrityError:
                    pass
            if res:
                redis.incr('cmnt_cnt.%s' % unb26(self.post.id))

    try:
        es = elasticsearch.Elasticsearch()
        es.index(index='point-comments',
                 id='%s-%s' % (self.post.id, self.id),
                 doc_type='post',
                 body={
                     'post_id': self.post.id,
                     'comment_id': self.id,
                     'post_type': self.post.type,
                     'created': self.created,
                     'private': self.post.private,
                     'user_id': self.author.id,
                     'login': self.author.login,
                     'text': self.text,
                 })
    except elasticsearch.ConnectionError, e:
        log.error('Elasticsearch: %s' % e)
def register():
    #raise Forbidden
    if env.user.id:
        raise AlreadyAuthorized

    sess = Session()
    info = sess['reg_info'] or {}
    print 'INFO', info

    if env.request.method == 'GET':
        try:
            del info['network']
            del info['uid']
        except (KeyError, TypeError):
            pass
        sess['reg_info'] = info
        sess.save()

        try:
            info['birthdate'] = parse_date(info['birthdate']) \
                                or datetime.now() - timedelta(days=365*16+4)
        except (KeyError, TypeError):
            info['birthdate'] = None

        return render('/auth/register.html', fields=ULOGIN_FIELDS, info=info)

    try:
        network = info['network'] if 'network' in info else None
        uid = info['uid'] if 'uid' in info else None
    except TypeError:
        network = None
        uid = None

    errors = []

    for p in ['login', 'name', 'email', 'birthdate', 'location',
              'about', 'homepage']:
        info[p] = env.request.args(p, '').decode('utf-8')
    info['gender'] = _gender(env.request.args('gender'))

    login = env.request.args('login', '').strip()
    if login and validate_nickname(login):
        try:
            u = User('login', login)
            if u.id:
                errors.append('login-in-use')
        except UserNotFound:
            pass
    elif login:
        errors.append('login-invalid')
    else:
        errors.append('login-empty')

    password = env.request.args('password')
    confirm = env.request.args('confirm')
    if not (network and uid):
        if not password:
            errors.append('password')
        elif password != confirm:
            errors.append('confirm')

    info['birthdate'] = parse_date(info['birthdate']) \
                        or datetime.now() - timedelta(days=365*16+4)

    if not network and not errors:
        try:
            text = env.request.args('recaptcha_response_field')
            challenge = env.request.args('recaptcha_challenge_field')
            resp = captcha.submit(challenge, text,
                                  settings.recaptcha_private_key,
                                  env.request.remote_host)
            if not resp.is_valid:
                errors.append('captcha')
        except urllib2.URLError, e:
            log.error('recaptcha fail: %s' % e)
            #errors.append('recaptcha-fail')
        except AddressNotFound:
            return Response(redirect='%s://%s/remember?fail=1' % \
                            (env.request.protocol, settings.domain))