def _get_pages(published_only=True):
    if published_only:
        return db.select('select * from pages where website_id=? and draft=? order by id desc', ctx.website.id, False)
    return db.select('select * from pages where website_id=? order by id desc', ctx.website.id)
def get_default_cv(uid):
    cvs = None
    while not cvs:
        cvs = db.select('select * from resumes where user_id=?', uid)
        if not cvs:
            cv_id = db.next_str()
            db.insert('resumes', id=cv_id, user_id=uid, title='My Resume', version=0)
            db.insert('sections', id=db.next_str(), user_id=uid, resume_id=cv_id, display_order=0, kind='about', title='About', description='', version=0)
    cv = cvs[0]
    cv.sections = db.select('select * from sections where resume_id=? order by display_order', cv.id)
    for section in cv.sections:
        section.style = _SECTIONS_STYLE[section.kind]
        section.entries = db.select('select * from entries where section_id=? order by display_order', section.id)
    return cv
def update_timeline():
    i = ctx.request.input()
    client = _create_client()
    data = client.parse_signed_request(i.signed_request)
    if data is None:
        raise StandardError('Error!')
    user_id = data.get('uid', '')
    auth_token = data.get('oauth_token', '')
    if not user_id or not auth_token:
        return dict(error='bad_signature')
    expires = data.expires
    client.set_access_token(auth_token, expires)
    u = db.select('select since_id from users where id=?', user_id)[0]
    kw = dict(uid=user_id, count=100, trim_user=1)
    since_id = u.since_id
    if since_id:
        kw['since_id'] = since_id
    timeline = client.statuses.user_timeline.get(**kw)
    statuses = timeline.statuses
    count = 0
    if statuses:
        since_id = str(statuses[0].id)
        for st in statuses:
            info = record.parse(st.text)
            if info:
                t, ymd = _parse_datetime(st.created_at)
                r = dict(id=st.id, user_id=user_id, text=st.text, created_at=t, rdistance=info[0], rtime=info[1], rdate=ymd)
                if not db.select('select id from records where id=?', st.id):
                    db.insert('records', **r)
                    count = count + 1
    db.update_kw('users', 'id=?', user_id, since_id=since_id)
    return dict(count=count, since_id=since_id)
def _get_articles(page=1, limit=20, published_only=True):
    offset = (page - 1) * limit
    if published_only:
        return db.select('select * from articles where website_id=? and draft=? order by id desc limit ?,?', ctx.website.id, False, offset, limit)
    return db.select('select * from articles where website_id=? order by id desc limit ?,?', ctx.website.id, offset, limit)
def _get_articles_by_category(category_id, page=1, limit=20, published_only=True):
    offset = (page - 1) * limit
    if published_only:
        return db.select('select * from articles where category_id=? and draft=? order by id desc limit ?,?', category_id, False, offset, limit)
    return db.select('select * from articles where category_id=? order by id desc limit ?,?', category_id, offset, limit)
def get_settings(kind=None, remove_prefix=False):
    '''
    Get all settings.
    '''
    settings = dict()
    if kind:
        L = db.select('select name, value from settings where kind=?', kind)
    else:
        L = db.select('select name, value from settings')
    for s in L:
        key = s.name[s.name.find('_') + 1:] if remove_prefix else s.name
        settings[key] = s.value
    return settings
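# Illustrative sketch (assumed row names, not from the original source): with settings
# named 'site_name' and 'site_copyright' stored under kind 'site', the remove_prefix
# flag strips everything up to and including the first '_' in each name:
#   get_settings('site')                      -> {'site_name': ..., 'site_copyright': ...}
#   get_settings('site', remove_prefix=True)  -> {'name': ..., 'copyright': ...}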
def _init_theme(path, model):
    theme = get_active_theme()
    model['__theme_path__'] = '/themes/%s' % theme
    model['__get_theme_path__'] = lambda _templpath: 'themes/%s/%s' % (theme, _templpath)
    model['__menus__'] = db.select('select * from menus order by display_order, name')
    model.update(get_settings('site'))
    if 'site_name' not in model:
        model['site_name'] = 'iTranswarp'
    if '__title__' not in model:
        model['__title__'] = model['site_name']
    model['ctx'] = ctx
    model['__layout_categories__'] = db.select('select * from categories order by display_order, name')
    return 'themes/%s/%s' % (theme, path), model
def featured_poems():
    total = db.select_int('select count(id) as num from poem where ilike>=100')
    s = set()
    while len(s) < 5:
        s.add(random.randint(0, total - 1))
    L = []
    for n in s:
        L.extend(db.select('select * from poem where ilike>=100 order by id limit ?,?', n, 1))
    total = db.select_int('select count(id) from poem where ilike<100')
    s = set()
    while len(s) < 5:
        s.add(random.randint(0, total - 1))
    for n in s:
        L.extend(db.select('select * from poem where ilike<100 order by id limit ?,?', n, 1))
    return dict(poems=L)
def callback():
    i = ctx.request.input(code="")
    code = i.code
    client = _create_client()
    r = client.request_access_token(code)
    logging.info("access token: %s" % json.dumps(r))
    access_token, expires_in, uid = r.access_token, r.expires_in, r.uid
    client.set_access_token(access_token, expires_in)
    u = client.users.show.get(uid=uid)
    logging.info("got user: %s" % uid)
    users = db.select("select * from users where id=?", uid)
    user = dict(
        name=u.screen_name,
        image_url=u.avatar_large or u.profile_image_url,
        statuses_count=u.statuses_count,
        friends_count=u.friends_count,
        followers_count=u.followers_count,
        verified=u.verified,
        verified_type=u.verified_type,
        auth_token=access_token,
        expired_time=expires_in,
    )
    if users:
        db.update_kw("users", "id=?", uid, **user)
    else:
        user["id"] = uid
        db.insert("users", **user)
    _make_cookie(uid, access_token, expires_in)
    raise seeother("/")
def m_poem_comments(id, ps='1'):
    '''
    GET /m_poem_comments/{poem_id}/{page}
    '''
    page = int(ps)
    if page < 1 or page > 100:
        return r'{"error":"invalid_page","description":"invalid page."}'
    page_size = 20
    offset = page_size * (page - 1)
    L = db.select('select * from poem where id=?', id)
    if not L:
        return r'{"error":"not_found","description":"poem not found."}'
    poem = L[0]
    comments = list(web.ctx.db.select('poem_comment', where='poem_id=$pid', vars=dict(pid=id), offset=offset, limit=page_size + 1, order='creation_time desc'))
    has_next = len(comments) > page_size
    if has_next:
        comments = comments[:-1]
    now = datetime.datetime.now()
    return dict(
        page=page,
        next=has_next,
        time=time.time(),
        comments=[dict(user_name=c.user_name, user_image=c.user_image, user_url=c.user_url, content=c.content, posted_before=_posted_before(now, c.creation_time)) for c in comments])
def get_comments_desc(ref_id, max_results=20, after_id=None):
    '''
    Get comments by page.

    Args:
        ref_id: reference id.
        max_results: the max results.
        after_id: comments after id.
    Returns:
        comments as list.
    '''
    if max_results < 1 or max_results > 100:
        raise ValueError('bad max_results')
    if after_id:
        return db.select('select * from comments where ref_id=? and id < ? order by id desc limit ?', ref_id, after_id, max_results)
    return db.select('select * from comments where ref_id=? order by id desc limit ?', ref_id, max_results)
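# Hypothetical usage sketch (not part of the original source): keyset-paging through all
# comments of a reference by feeding the last seen id back in as after_id. The helper
# name _iter_all_comments is made up for illustration.
def _iter_all_comments(ref_id, page_size=20):
    after_id = None
    while True:
        batch = get_comments_desc(ref_id, max_results=page_size, after_id=after_id)
        if not batch:
            break
        for c in batch:
            yield c
        # comments come back in descending id order, so the last one is the smallest id
        after_id = batch[-1].id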
def attachments():
    i = ctx.request.input(action='', page='1', size='20')
    if i.action == 'delete':
        delete_attachment(i.id)
        raise seeother('attachments')
    page = int(i.page)
    size = int(i.size)
    num = db.select_int('select count(id) from attachments where website_id=?', ctx.website.id)
    if page < 1:
        raise APIValueError('page', 'page invalid.')
    if size < 1 or size > 100:
        raise APIValueError('size', 'size invalid.')
    offset = (page - 1) * size
    atts = db.select('select * from attachments where website_id=? order by id desc limit ?,?', ctx.website.id, offset, size + 1)
    next = False
    if len(atts) > size:
        atts = atts[:-1]
        next = True
    return Template('templates/attachments.html', attachments=atts, page=page, previous=page > 2, next=next)
def do_register():
    i = ctx.request.input(name='', email='', passwd='')
    name = i.name.strip()
    if not name:
        raise APIError('value', '', 'Invalid name.')
    email = i.email.strip().lower()
    check_email(email)
    passwd = i.passwd
    check_md5_passwd(passwd)
    us = db.select('select * from users where email=?', email)
    if us:
        raise APIError('register', '', 'Email already registered.')
    uid = db.next_str()
    db.insert('users', id=uid, name=name, email=email, passwd=passwd, version=0)
    make_session_cookie(uid, passwd)
    return {'id': uid}
def _get_setting(website_id, kind, key, default=u''):
    ss = db.select('select value from settings where name=? and website_id=?', '%s:%s' % (kind, key), website_id)
    if ss:
        v = ss[0].value
        if v:
            return v
    return default
def find_by(cls, where, *args):
    '''
    Find by where clause and return list.
    '''
    sql = 'select * from %s where %s' % (cls.__table__, where)
    d = db.select(sql, *args)
    return [cls(**i) for i in d]
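# Hypothetical usage (not in the original source), assuming find_by is exposed as a
# classmethod on ORM model classes that define __table__ (the Page class and its
# table name 'pages' are made up here):
#   pages = Page.find_by('website_id=? order by id desc', website_id)
#   # -> executes: select * from pages where website_id=? order by id desc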
def _get_text(website_id, kind, key, default):
    ss = db.select('select value from texts where name=? and website_id=?', '%s:%s' % (kind, key), website_id)
    if ss:
        v = ss[0].value
        if v:
            return v
    return default
def _load_app_info():
    global _APP_ID, _APP_SECRET, _ADMIN_PASS
    for s in db.select("select * from settings"):
        if s.id == "app_id":
            _APP_ID = s.value
        if s.id == "app_secret":
            _APP_SECRET = s.value
        if s.id == "admin_pass":
            _ADMIN_PASS = s.value
def _get_site(host):
    wss = db.select('select * from websites where domain=?', host)
    if wss:
        ws = wss[0]
        if ws.disabled:
            logging.debug('website is disabled: %s' % host)
            raise forbidden()
        return ws
    raise notfound()
def archives():
    years = db.select('select distinct `year` from `blogs` order by created desc')
    if not years:
        raise notfound()
    xblogs = list()
    for y in years:
        blogs = Blogs.find_by('where `year` = ? order by created desc', y.get('year'))
        xblogs.append(blogs)
    return dict(xblogs=xblogs)
def get_text(name, default=''):
    '''
    Get text by name. Return default value '' if not exist.
    '''
    ss = db.select('select value from texts where name=?', name)
    if ss:
        v = ss[0].value
        if v:
            return v
    return default
def dynasty_page(dyn_id):
    '''
    GET /dynasty/{dynasty_id}/{page}

    Show dynasty page.
    '''
    dynasty = get_dynasty(dyn_id)
    dynasties = get_dynasties()
    poets = db.select('select * from poet where dynasty_id=? order by pinyin', dyn_id)
    return dict(title=dynasty.name, dynasty=dynasty, dynasties=dynasties, poets=poets)
def _get_Text(username):
    lb = db.select('select * from labels where username=? and weight>0', username)
    txt = []
    wei = []
    for i in range(0, len(lb)):
        print lb[i]['label']
        txt.append(lb[i]['label'])
        wei.append(lb[i]['weight'])
    return {"text": txt, "weight": wei}
def get_menus():
    '''
    Get navigation menus as list, each element is a Dict object.
    '''
    menus = db.select('select * from menus order by display_order, name')
    if menus:
        return menus
    current = time.time()
    menu = Dict(id=db.next_str(), name=u'Home', description=u'', type='latest_articles', display_order=0, ref='', url='/latest', creation_time=current, modified_time=current, version=0)
    db.insert('menus', **menu)
    return [menu]
def archives():
    sql = 'SELECT YEAR(`created`) AS `year`, `id`, `title`, `created` FROM `blogs` ORDER BY `created` DESC'
    blogs = db.select(sql)
    if not blogs:
        raise notfound()
    xblogs = OrderedDict()
    for blog in blogs:
        if blog['year'] not in xblogs:
            xblogs[blog['year']] = [blog]
        else:
            xblogs[blog['year']].append(blog)
    return dict(xblogs=xblogs)
def getUsers():
    u = _check_cookie()
    if u is None:
        return dict(error='failed', redirect='/signin')
    client = _create_client()
    client.set_access_token(u.auth_token, u.expired_time)
    try:
        output = db.select('select * from users')
        result = {'users': output}
        return result
    except APIError, e:
        return dict(error='failed')
def get_poems(poet_id, page=1):
    '''
    Return poems of page N (N=1, 2, 3) and has_next.
    '''
    if page < 1 or page > 100:
        raise ValueError('invalid page.')
    offset = PAGE_SIZE * (page - 1)
    maximum = PAGE_SIZE + 1
    L = db.select('select * from poem where poet_id=? order by name_pinyin limit ?,?', poet_id, offset, maximum)
    if len(L) == maximum:
        return L[:-1], True
    return L, False
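# Hypothetical usage sketch (not in the original source): collecting a poet's poems page
# by page until has_next turns False. The helper name _collect_all_poems is made up.
def _collect_all_poems(poet_id):
    poems = []
    page = 1
    while True:
        batch, has_next = get_poems(poet_id, page)
        poems.extend(batch)
        if not has_next:
            break
        page = page + 1
    return poems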
def _get_Image(label, username):
    li = db.select('select id from labels where label=? and username=?', label, username)
    print li
    imgs = db.select('select * from pictures where labelID=?', li[0]['id'])
    src = []
    dec = []
    for i in range(0, len(imgs)):
        full = imgs[i]['picUrl']
        tm = imgs[i]['upTime']
        cn = re.findall('[a-z0-9A-Z]+.jpg', full)
        full = cn[0]
        tm = tm[4:10] + tm[25:30]
        src.append(full)
        dec.append(tm)
    return {"src": src, "dec": dec}
def _get_wikipages(wiki, returnDict=False):
    '''
    Get all wiki pages and return as tree. Each wiki page contains only id, website_id,
    wiki_id, parent_id, display_order, name and version. The return value is the list of
    children of a virtual root node, or a dict of id -> page if returnDict is True.
    '''
    pages = db.select('select id, website_id, wiki_id, parent_id, display_order, name, version from wiki_pages where wiki_id=?', wiki.id)
    pdict = dict(((p.id, p) for p in pages))
    if returnDict:
        return pdict
    proot = Dict(id='')
    _tree_iter(pdict, proot)
    return proot.children
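# Hypothetical sketch (not in the original source): a depth-first walk over the tree
# returned by _get_wikipages(), assuming _tree_iter() attaches a 'children' list to each
# page node. The helper name _walk_wikipages and the indentation output are made up.
def _walk_wikipages(nodes, depth=0):
    for node in nodes:
        print '%s%s' % ('  ' * depth, node.name)
        _walk_wikipages(node.children, depth + 1)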
def _get_categories():
    cats = db.select('select * from categories where website_id=? order by display_order, name', ctx.website.id)
    if not cats:
        logging.info('create default uncategorized...')
        current = time.time()
        uncategorized = Dict(
            id=db.next_str(),
            website_id=ctx.website.id,
            name='Uncategorized',
            description='',
            locked=True,
            display_order=0,
            creation_time=current,
            modified_time=current,
            version=0)
        db.insert('categories', **uncategorized)
        cats = [uncategorized]
    return cats
def statistics():
    i = ctx.request.input()
    client = _create_client()
    data = client.parse_signed_request(i.signed_request)
    if data is None:
        return dict(error='bad_signature')
    user_id = data.uid
    last_6m = (datetime.now() - timedelta(days=180)).timetuple()
    dt = last_6m.tm_year * 10000 + last_6m.tm_mon * 100 + last_6m.tm_mday
    return dict(
        days=180,
        start=dt,
        data=db.select('select text, rdistance, rtime, rdate from records where rdate>? and user_id=?', dt, user_id))
def delete_entry():
    _check_user()
    i = ctx.request.input(id='')
    if not i.id:
        raise APIError('value', 'id', 'id is empty.')
    entry = db.select_one('select * from entries where id=?', i.id)
    _check_user_id(entry.user_id)
    entries = db.select('select * from entries where section_id=? order by display_order', entry.section_id)
    display_ids = [en.id for en in entries if en.id != i.id]
    db.update('delete from entries where id=?', i.id)
    # renumber the remaining entries so display_order stays contiguous
    n = 0
    for entry_id in display_ids:
        db.update('update entries set display_order=? where id=?', n, entry_id)
        n = n + 1
    db.update('update sections set version=version+1 where id=?', entry.section_id)
    return dict(result=True)
def _get_settings(website_id, kind, removePrefix=True):
    '''
    Return key, value as dict.
    '''
    L = db.select('select name, value from settings where kind=? and website_id=?', kind, website_id)
    d = {}
    if removePrefix:
        l = len(kind) + 1
        for s in L:
            d[s.name[l:]] = s.value
    else:
        for s in L:
            d[s.name] = s.value
    return d
def delete_section():
    _check_user()
    i = ctx.request.input(id='')
    if not i.id:
        raise APIError('value', 'id', 'id is empty.')
    section = db.select_one('select * from sections where id=?', i.id)
    _check_user_id(section.user_id)
    cv = get_default_cv(ctx.user.id)
    sections = db.select('select * from sections where resume_id=? order by display_order', cv.id)
    display_ids = [s.id for s in sections if s.id != i.id]
    db.update('delete from entries where section_id=?', i.id)
    db.update('delete from sections where id=?', i.id)
    # renumber the remaining sections so display_order stays contiguous
    n = 0
    for section_id in display_ids:
        db.update('update sections set display_order=? where id=?', n, section_id)
        n = n + 1
    db.update('update resumes set version=version+1 where id=?', cv.id)
    return dict(result=True)
def _check_cookie():
    try:
        b64cookie = ctx.request.cookies[_COOKIE]
        cookie = base64.b64decode(b64cookie.replace('_', '='))
        uid, expires, md5 = cookie.split(':', 2)
        if int(expires) < time.time():
            return
        L = db.select('select * from users where id=?', uid)
        if not L:
            return
        u = L[0]
        s = '%s:%s:%s:%s' % (uid, str(u.auth_token), expires, _SALT)
        if md5 != hashlib.md5(s).hexdigest():
            return
        return u
    except BaseException:
        pass
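# Hypothetical counterpart sketch (not in the original source): building a cookie that
# _check_cookie() above would accept. It assumes the same _COOKIE and _SALT constants,
# that the user's auth_token is already stored in the users table, and that the framework
# exposes ctx.response.set_cookie; the name _make_cookie_sketch is made up.
def _make_cookie_sketch(uid, auth_token, expires_in):
    # absolute expiration timestamp, as expected by the int(expires) < time.time() check
    expires = str(int(time.time()) + int(expires_in))
    md5 = hashlib.md5('%s:%s:%s:%s' % (uid, str(auth_token), expires, _SALT)).hexdigest()
    cookie = '%s:%s:%s' % (uid, expires, md5)
    # mirror the decode side: '=' padding is swapped for '_' before storing
    ctx.response.set_cookie(_COOKIE, base64.b64encode(cookie).replace('=', '_'))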
def _wrapper(*args, **kw):
    user = None
    uid = extract_session_cookie()
    if uid:
        users = db.select('select * from users where id=?', uid)
        if users:
            user = users[0]
            logging.info('load user ok from cookie.')
    if user is None:
        auth = ctx.request.header('AUTHORIZATION')
        logging.debug('get authorization header: %s' % auth)
        if auth and auth.startswith('Basic '):
            user = http_basic_auth(auth[6:])
    ctx.user = user
    try:
        return func(*args, **kw)
    finally:
        del ctx.user
def do_signin():
    i = ctx.request.input(remember='')
    email = i.email.strip().lower()
    passwd = i.passwd
    remember = i.remember
    if not email or not passwd:
        return dict(email=email, remember=remember, error=_('Bad email or password'))
    us = db.select('select id, passwd from users where email=?', email)
    if not us:
        return dict(email=email, remember=remember, error=_('Bad email or password'))
    u = us[0]
    if passwd != u.passwd:
        logging.debug('expected passwd: %s' % u.passwd)
        return dict(email=email, remember=remember, error=_('Bad email or password'))
    expires = time.time() + _SESSION_COOKIE_EXPIRES if remember else None
    make_session_cookie(u.id, passwd, expires)
    ctx.response.delete_cookie(_COOKIE_SIGNIN_REDIRECT)
    raise seeother(ctx.request.cookie(_COOKIE_SIGNIN_REDIRECT, '/'))