def external_point_update(point, user):
    """Upsert an externally-sourced point by uuid.

    If a location with point.uuid already exists, only its title is
    updated and point.id is set to the existing row's id; returns
    "update".  Otherwise a new locations row is inserted, its tags are
    attached, point.id is set to the new id, and "insert" is returned.

    Side effect: mutates *point* (sets point.id) in both branches.
    """
    db_point = list(point_by_uuid(point.uuid) or [])
    if db_point:
        # !!! should we update lat, lon too?
        point.id = db_point[0].id
        web.query("""
            UPDATE locations SET title=$title WHERE id = $id ;
            """, vars=point.copy())
        # how to update tags???
        # tags_insert(point, user, point.tags, deny_namespaces=[])
        return "update"
    else:
        web.insert(
            "locations",
            lat=point.lat,
            lon=point.lon,
            title=point.title,
            uuid=point.uuid,
            user_id=user.id,
            origin=point.origin,
            added=point.added,  # !!!? original author unsure about this column
            url=point.url)
        # Re-read the row we just inserted to learn its generated id.
        db_point = list(point_by_uuid(point.uuid))[0]
        tags_insert(db_point, user, point.tags, deny_namespaces=[])
        point.id = db_point.id
        return "insert"
def delete_user_by_description(description):
    """Delete every user row whose description matches *description*.

    The description column is (ab)used as a temporary marker, so this
    removes the users that were tagged with that marker.
    """
    web.query(
        """ DELETE FROM users WHERE description = $description """,
        vars={"description": description})
def project_update(project, user):
    """Persist a project's editable columns.

    Coordinates are written only when the project mapping carries a
    "lat" key; otherwise just title/uuid/origin are updated.  The type
    column is forced to 'project' either way.
    """
    if "lat" in project:
        web.query("""
            UPDATE locations
            SET title=$title, uuid=$uuid, origin=$origin,
                lat=$lat, lon=$lon, type='project'
            WHERE id = $id ;
            """, vars=project.copy())
    else:
        web.query("""
            UPDATE locations
            SET title=$title, uuid=$uuid, origin=$origin, type='project'
            WHERE id = $id ;
            """, vars=project.copy())
def remove_datasource_from_project(ds, project):
    """Unlink datasource *ds* from *project* in the join table."""
    link = dict(ds_id=ds.id, project_id=project.id)
    web.query("""
        DELETE FROM locations_datasources
        WHERE locations_datasources.location_id = $project_id
        AND locations_datasources.datasource_id = $ds_id ;
        """, vars=link)
def clean(lifetime=2592000):
    """Delete all sessions older than *lifetime* seconds (default 30 days).

    We could call this on every request, but may as well just do it at
    some periodic interval.  Always returns True.
    """
    timestamp = int(time.time()) - lifetime
    web.query("DELETE FROM sessions WHERE timestamp < $timestamp",
              vars=dict(timestamp=timestamp))
    return True
def external_point_update(point, user):
    """Upsert an externally-sourced point, keyed by its uuid.

    Existing uuid: update the title only, record the db id on *point*,
    return "update".  Unknown uuid: insert a fresh locations row, attach
    the point's tags, record the new id on *point*, return "insert".

    NOTE: mutates *point* (point.id) in both branches.
    """
    db_point = list(point_by_uuid(point.uuid) or [])
    if db_point:
        # !!! should we update lat, lon too?
        point.id = db_point[0].id
        web.query("""
            UPDATE locations SET title=$title WHERE id = $id ;
            """, vars=point.copy())
        # how to update tags???
        # tags_insert(point, user, point.tags, deny_namespaces=[])
        return "update"
    else:
        web.insert("locations",
                   lat=point.lat,
                   lon=point.lon,
                   title=point.title,
                   uuid=point.uuid,
                   user_id=user.id,
                   origin=point.origin,
                   added=point.added,  # !!!? column flagged by original author
                   url=point.url)
        # Fetch the inserted row back to obtain its generated id.
        db_point = list(point_by_uuid(point.uuid))[0]
        tags_insert(db_point, user, point.tags, deny_namespaces=[])
        point.id = db_point.id
        return "insert"
def books(fbooks, fauthors):
    """Stream every live datum row, splitting authors and editions to files.

    Rows whose type is /type/author are written to *fauthors*, rows of
    /type/edition to *fbooks*, one str()-formatted dict per line.  Prints
    throughput every 10000 rows.
    """
    type_author = str(
        web.query("SELECT * FROM thing WHERE site_id=1 AND key='/type/author'")
        [0].id)
    type_edition = str(
        web.query("SELECT * FROM thing WHERE site_id=1 AND key='/type/edition'"
                  )[0].id)
    # FIX: the original query put ORDER BY before WHERE, which is invalid
    # SQL; clause order corrected.  (Also dropped the unused `authors` dict.)
    result = select(
        "SELECT * FROM datum WHERE end_revision=2147483647 ORDER BY thing_id")
    t1 = time.time()
    for i, t in enumerate(parse_datum(result)):
        if t['type'] == type_author:
            fauthors.write(str(t))
            fauthors.write("\n")
        elif t['type'] == type_edition:
            fbooks.write(str(t))
            fbooks.write("\n")
        if i and i % 10000 == 0:
            t2 = time.time()
            dt = t2 - t1
            t1 = t2
            print("%d: 10000 books read in %f time. %f things/sec"
                  % (i, dt, 10000 / dt))
def remove_point_from_project(point, project):
    """Soft-remove *point* from *project* by hiding the link row."""
    web.query("""
        UPDATE projects_points SET visible = 0
        WHERE location_id = $point_id AND project_id = $project_id ;
        """, vars={'point_id': point.id, 'project_id': project.id})
def point_update(point, user):
    """Write a point's title and uuid back to its locations row."""
    web.query("""
        UPDATE locations
        SET title=$title, uuid=$uuid
        WHERE id = $id ;
        """, vars=point.copy())
def regenerate(uid=0):
    """Called when an anonymous user becomes authenticated or vice-versa.

    Mints a fresh session id, pushes it to the _SID_ cookie, then rewrites
    the sessions row so it carries the new sid and the given uid.
    """
    old_session_id = web.cookies()._SID_
    new_session_id = _generate_id()
    web.setcookie("_SID_", new_session_id)
    web.query(
        "UPDATE sessions SET uid = $uid, sid = $new_session_id WHERE sid = $old_session_id",
        vars=locals())
def drupy_cron():
    """Clean out old stuff: purge stale watchdog rows and flood entries.

    watchdog rows older than the 'watchdog_clear' setting (default one
    week) and flood rows older than an hour are deleted in one transaction.
    """
    time1 = time() - variable_get('watchdog_clear', 604800)
    time2 = time() - 3600
    web.transact()
    web.query('DELETE FROM watchdog WHERE timestamp < $time1',
              vars={'time1': time1})
    web.query('DELETE FROM flood WHERE timestamp < $time2',
              vars={'time2': time2})
    web.commit()
def multiple_insert(table, values, seqname=None):
    """Insert multiple rows into *table* using PostgreSQL COPY.

    Each item of *values* is a dict covering exactly the table's columns.
    When a sequence is in play (default: <table>_id_seq) the sequence is
    pre-advanced, ids are assigned into each row dict, and the reserved
    ids are returned; otherwise returns None.  Empty *values* returns [].
    """
    def escape(value):
        # Render one value in COPY text format (tab-separated rows).
        if value is None:
            return r'\N'
        elif isinstance(value, string_types):
            value = value.replace('\\', r'\\')  # this must be the first one
            value = value.replace('\t', r'\t')
            value = value.replace('\r', r'\r')
            value = value.replace('\n', r'\n')
            return value
        elif isinstance(value, bool):
            return value and 't' or 'f'
        else:
            return str(value)

    def increment_sequence(seqname, n):
        """Advance *seqname* by *n*; return the first reserved value."""
        d = web.query(
            "SELECT setval('%s', $n + (SELECT last_value FROM %s), true) + 1 - $n AS START"
            % (seqname, seqname),
            locals(),
        )
        return d[0].start

    if not values:
        return []
    if seqname is None:
        seqname = table + "_id_seq"

    columns = get_table_columns(table)
    if seqname:
        n = len(values)
        start = increment_sequence(seqname, n)
        ids = range(start, start + n)
        for v, id in zip(values, ids):
            v['id'] = id
    else:
        ids = None

    data = []
    for v in values:
        assert set(v.keys()) == set(columns)
        data.append("\t".join([escape(v[c]) for c in columns]))

    # FIX: tempfile.mktemp is race-prone (deprecated) and the temp file
    # was never deleted; use mkstemp and remove it once COPY has run.
    import os
    fd, filename = tempfile.mkstemp(suffix='.copy', prefix=table)
    try:
        with os.fdopen(fd, 'w') as f:
            f.write(web.safestr("\n".join(data)))
        web.query("COPY %s FROM '%s'" % (table, filename))
    finally:
        os.remove(filename)
    return ids
def delete(self, id, providerName):
    """Remove the place identified by (providerName, id) and log the action."""
    username = self.usernameProvider.get()
    web.transact()
    where = "providerid=%s and provider=%s" % (
        web.db.sqlquote(id), web.db.sqlquote(providerName))
    web.delete('places', where=where)
    note = 'elimino propiedad %s-%s' % (providerName, id)
    web.query(self.SQL_ACTION % (web.db.sqlquote(username),
                                 web.db.sqlquote(note)))
    web.commit()
def query(sql_query, vars={}, limit = 10, count_query = None, processed=False, _test=False):
    """Paginated variant of web.query().

    Works very similar to web.query(), but returns a tuple of the query
    result (one page of *limit* rows) and a pager object which holds the
    page-number links to display.

    Typical use:
        iter_nodes, page_nums = inc.pager.query('SELECT * FROM node WHERE uid=5 ORDER BY nid')
        for node in iter_nodes:
            print node
        print page_nums.render()

    DOCTEST
    >>> import pager
    >>> limit=10
    >>> iter_nodes, page_nums = pager.query('''SELECT n.nid, c.cache, c.nid \
    ... AS cache_nid, c.vid as cache_vid, n.vid, n.type, \
    ... n.status, n.created, n.changed, n.comment, n.promote, n.sticky, \
    ... u.uid, u.name, u.picture, u.data FROM node n INNER JOIN \
    ... users u ON u.uid = n.uid LEFT JOIN cache_node c ON c.nid = n.nid \
    ... AND c.vid = n.vid WHERE n.promote = 1 AND n.status = 1 \
    ... ORDER BY n.sticky DESC, n.created DESC''',limit=limit, _test=True)
    count_query: SELECT COUNT(*) FROM node n INNER JOIN users u ON u.uid = n.uid LEFT JOIN cache_node c ON c.nid = n.nid AND c.vid = n.vid WHERE n.promote = 1 AND n.status = 1
    >>> iter_nodes
    <sql: 'SELECT n.nid, c.cache, c.nid AS cache_nid, c.vid as cache_vid, n.vid, n.type, n.status, n.created, n.changed, n.comment, n.promote, n.sticky, u.uid, u.name, u.picture, u.data FROM node n INNER JOIN users u ON u.uid = n.uid LEFT JOIN cache_node c ON c.nid = n.nid AND c.vid = n.vid WHERE n.promote = 1 AND n.status = 1 ORDER BY n.sticky DESC, n.created DESC LIMIT 10 OFFSET 40'>

    NOTE: the regex that derives count_query only matches upper-case SQL
    keywords (SELECT ... FROM ... ORDER BY); pass count_query explicitly
    for lower-case SQL.
    """
    # NOTE(review): mutable default `vars={}` is shared across calls — safe
    # only while no caller mutates it.  The bare `print` below keeps this
    # module Python 2 only.
    if not processed and not isinstance(sql_query, web.db.SQLQuery):
        sql_query = str(web.db.reparam(sql_query, vars))
    if not count_query:
        # Derive SELECT COUNT(*) from the page query by swapping the
        # projection and stripping the ORDER BY clause.
        p = re.compile(r'SELECT.*?FROM (.*) ORDER BY .*')
        count_query = p.sub(lambda m: "SELECT COUNT(*) FROM %s" % m.group(1),
                            sql_query)
    if _test:
        num_pages=10
        page = 5
        print 'count_query:', count_query
    else:
        count = web.query(count_query)[0].values()[0]
        num_pages = int(float(count) / limit + 1)
        page = _current_page()
    #page number validation
    #todo !!! wait a minute, maybe these two lines are no good.
    # cause then there can be many urls for the first and last pages...
    if page < 1:
        page=1
    elif page > num_pages:
        page=num_pages
    p = pager(page,num_pages)
    offset = (page-1)*limit
    return web.query(''.join((sql_query,' LIMIT $limit OFFSET $offset')),
                     vars={'limit':limit,'offset':offset},_test=_test), p
def unset_policies(object, user, roles, adder_user):
    """Strip each role in *roles* from *user* on *object*.

    NOTE: *adder_user* is accepted for signature parity but unused here.
    """
    for role in roles:
        params = dict(user_id=user.id, location_id=object.id, role=role)
        web.query("""
            DELETE FROM locations_policy_table
            WHERE user_id = $user_id AND location_id=$location_id AND role=$role
            ;""", vars=params)
def POST(self, providerName, id, state):
    """Change the stored state of a scraped place, log it, and redirect."""
    lid = scrappers[providerName].local(id)
    web.transact()
    web.query(self.SQL_UPDATE % (web.db.sqlquote(state), lid))
    note = 'cambio al estado %s la prop %s-%s' % (state, providerName, id)
    web.query(self.SQL_ACTION % (web.db.sqlquote(usernameProvider.get()),
                                 web.db.sqlquote(note)))
    web.commit()
    web.seeother('../../')
def __setitem__(self, key, item):
    """Store a profile property, replacing any previous value.

    Non-tuple keys are normalized to (lang=None, prop_key).  NOTE: the
    delete + insert pair should really be one transaction, but concurrent
    edits of a profile are not expected.
    """
    self.__delitem__(key)
    if not isinstance(key, tuple):
        key = None, key
    lang, prop_key = key
    web.query("""INSERT INTO profile (lang, prop_key, prop_value)
        VALUES ($lang, $prop_key, $prop_value);""",
              vars=dict(lang=lang, prop_key=prop_key, prop_value=item))
    self.__dict__["_data"][key] = item
def __delitem__(self, key):
    """Delete a profile property from the DB and the local cache.

    Non-tuple keys are normalized to (lang=None, prop_key).  Missing
    cache entries are ignored.
    """
    if not isinstance(key, tuple):
        key = None, key
    web.query("""DELETE FROM profile
        WHERE lang=$lang AND prop_key=$prop_key;""",
              vars=dict(lang=key[0], prop_key=key[1],))
    # FIX: the bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # only an absent cache entry is expected here.
    try:
        del self.__dict__["_data"][key]
    except KeyError:
        pass  # no problem if the key is not there
def multiple_insert(table, values, seqname=None):
    """Insert multiple rows into *table* using PostgreSQL COPY (py2 variant).

    Each item of *values* is a dict covering exactly the table's columns.
    With a sequence (default <table>_id_seq) the sequence is pre-advanced,
    ids are written into the row dicts, and the reserved ids returned;
    without one, returns None.  Empty *values* returns [].
    """
    def escape(value):
        # Render one value in COPY text format.
        if value is None:
            # FIX: "\N" is not a valid escape ("\N{...}" is a SyntaxError
            # in Python 3 and only a literal backslash-N by accident in
            # py2); COPY's NULL marker must be the raw two characters.
            return r'\N'
        elif isinstance(value, basestring):
            value = value.replace('\\', r'\\')  # this must be the first one
            value = value.replace('\t', r'\t')
            value = value.replace('\r', r'\r')
            value = value.replace('\n', r'\n')
            return value
        elif isinstance(value, bool):
            return value and 't' or 'f'
        else:
            return str(value)

    def increment_sequence(seqname, n):
        """Increments a sequence by the given amount; returns the first id."""
        d = web.query(
            "SELECT setval('%s', $n + (SELECT last_value FROM %s), true) + 1 - $n AS START"
            % (seqname, seqname), locals())
        return d[0].start

    def write(path, data):
        f = open(path, 'w')
        f.write(web.utf8(data))
        f.close()

    if not values:
        return []
    if seqname is None:
        seqname = table + "_id_seq"

    columns = get_table_columns(table)
    if seqname:
        n = len(values)
        start = increment_sequence(seqname, n)
        ids = range(start, start + n)
        for v, id in zip(values, ids):
            v['id'] = id
    else:
        ids = None

    data = []
    for v in values:
        assert set(v.keys()) == set(columns)
        data.append("\t".join([escape(v[c]) for c in columns]))

    # NOTE(review): mktemp is race-prone and the file is never removed —
    # the sibling py3 variant of this function fixes both; left as-is here
    # to keep the py2 code path unchanged beyond the escape fix.
    filename = tempfile.mktemp(suffix='.copy', prefix=table)
    write(filename, "\n".join(data))
    web.query("COPY %s FROM '%s'" % (table, filename))
    return ids
def get_all_values(index):
    """Return every distinct value known for *index*, ordered per get_order.

    Multi-valued indexes read their own table; compound indexes use the
    configured reverse query; plain indexes read the column directly.
    """
    if index in config.have_many_values:
        rows = web.query('select value from %ss' % index)
        return [row.value for row in rows]
    if index in config.compound_indexes.keys():
        compound_info = config.compound_indexes[index]
        rows = web.query(
            'select distinct(%s) as value from images order by %s'
            % (compound_info['reverse_query'](), get_order(index)))
        return [str(row.value) for row in rows]
    rows = web.query(
        'select distinct(%s) as value from images order by %s'
        % (index, get_order(index)))
    return [row.value for row in rows]
def select(query, chunk_size=50000):
    """Selects large number of rows efficiently using cursors.

    Opens a transaction, declares a server-side cursor for *query*, and
    yields rows fetched *chunk_size* at a time; the transaction is rolled
    back once the cursor is exhausted.
    """
    web.transact()
    web.query('DECLARE select_cursor CURSOR FOR ' + query)
    while True:
        result = web.query('FETCH FORWARD $chunk_size FROM select_cursor',
                           vars=locals())
        if not result:
            break
        for r in result:
            yield r
    # NOTE(review): if a caller abandons the generator early, this rollback
    # only runs when the generator is garbage-collected — confirm that is
    # acceptable for long-lived connections.
    web.rollback()
def group_update(user, group):
    """Re-link *user* to *group*: delete any existing membership row, then
    insert a fresh one.  Returns the result of the INSERT query."""
    membership = dict(user_id=user.id, group_id=group.id)
    web.query("""
        DELETE FROM group_users
        WHERE user_id = $user_id AND group_id = $group_id;
        """, vars=membership)
    return web.query("""
        INSERT INTO group_users (user_id, group_id)
        VALUES ($user_id, $group_id);
        """, vars=membership)
def get_default_values(sel_ik, index):
    """Return (value, quantity) rows an index could still use so that at
    least one file matches the current selection *sel_ik*.

    *sel_ik* is a sequence of (index, value) pairs already selected;
    values already chosen for *index* itself are excluded via an
    ignore clause.  Multi-valued indexes are counted through their join
    tables; compound indexes use config's reverse_query.
    """
    value_per_index = []
    #get all the possible values from the index
    if sel_ik:
        #building the ignore clause from values already selected for this index
        for (sel_index, sel_value) in sel_ik:
            if sel_index == index:
                if value_per_index:
                    value_per_index.append(sel_value)
                else:
                    value_per_index = [sel_value, ]
        if value_per_index:
            # %s placeholder is filled later with the column expression.
            # NOTE(review): the selected values are interpolated unescaped —
            # safe only while values never contain quotes; confirm upstream.
            ignore_clause = "and %s not in ("+", ".join(['"'+ivalue+'"' for ivalue in value_per_index])+")"
        else:
            ignore_clause = ""
        if index in config.have_many_values:
            additional_clauses = build_clauses(sel_ik, 'and ', 'images.')
            if ignore_clause:
                temp_ignore_clause = ignore_clause % "value"
            else:
                temp_ignore_clause = ""
            return web.query('select value, count(images.id) as quantity from images_%ss , %ss, images where %s_id = %ss.id and images_%ss.image_id = images.id %s %s group by %s_id order by %s' % (index, index, index, index, index, temp_ignore_clause , additional_clauses, index, get_order(index)))
        else:
            additional_clauses = build_clauses(sel_ik, 'and ')
            if index in config.compound_indexes.keys():
                #need to get database specific query to match value
                db_value = config.compound_indexes[index]['reverse_query']()
            else:
                db_value = index
            if ignore_clause:
                temp_ignore_clause = ignore_clause % db_value
            else:
                temp_ignore_clause = ""
            return web.query('select %s as value, count(id) as quantity from images where 1=1 %s %s group by value order by %s' % (db_value, temp_ignore_clause , additional_clauses, get_order(index)))
    else:
        #simpler case, left here for the sake of simplicity
        if index in config.have_many_values:
            return web.query('select value, count(image_id) as quantity from images_%ss , %ss where %s_id = id group by %s_id order by %s' % (index, index, index, index , get_order(index)))
        else :
            if index in config.compound_indexes.keys():
                #need to get database specific query to match value
                db_value = config.compound_indexes[index]['reverse_query']()
            else:
                db_value = index
            return web.query('select %s as value, count(id) as quantity from images group by value order by %s' % (db_value, get_order(index)))
def point_full_update(point, user):
    """Write every editable column of a point back to its locations row."""
    web.query("""
        UPDATE locations
        SET title=$title, uuid=$uuid,
            lat=$lat, lon=$lon,
            visible=$visible, url=$url
        WHERE id = $id ;
        """, vars=point.copy())
def POST(self, providerName, id):
    """Attach a forum comment to a scraped place, log it, and redirect."""
    lid = scrappers[providerName].local(id)
    i = web.input()
    username = usernameProvider.get()
    web.transact()
    n = web.insert('places_forum', idPlace=lid, owner=username,
                   description=i.description)
    note = 'agrego un comentario a %s-%s' % (providerName, id)
    web.query(self.SQL_ACTION % (web.db.sqlquote(username),
                                 web.db.sqlquote(note)))
    web.commit()
    web.seeother('../')
def store_infos(infos, extra_db_entries):
    """Persist the metadata dict *infos* for one image file.

    Single-valued keys go onto the images row; keys listed in
    config.have_many_values are stored in per-index join tables.  If a
    row with the same sha already exists, that row is updated in place
    and its filename removed from *extra_db_entries*; otherwise a fresh
    images row is inserted.  (Python 2 code: print statements and the
    old `except Exception, inst` syntax.)
    """
    print " %s" % (infos)
    simple_infos = infos.copy()
    multiple_infos = {}
    # Peel multi-valued indexes off the copy; they get their own tables.
    for imv in config.have_many_values:
        try:
            del simple_infos[imv]
            multiple_infos[imv] = infos[imv]
        except KeyError:
            pass
    #checking for file renaming with sha
    # NOTE(review): sha is interpolated unescaped into SQL — safe only
    # because a sha is hex; confirm it is always computed locally.
    possiblePrevFiles = web.query("select id, filename, batch from images where sha ='"+infos['sha']+"'")
    updatingFile = False
    if len(possiblePrevFiles) == 1:
        #file found in db
        print "INFO duplicate found : "+infos['filename']
        prevFile = possiblePrevFiles[0]
        file_id = prevFile.id
        simple_infos['batch'] = prevFile.batch
        try:
            extra_db_entries.remove(prevFile.filename)
            web.update('images', 'id = %s' % file_id, None, **simple_infos)
            updatingFile = True
        except ValueError:
            #raised by .remove when the filename does not match
            print "WARNING duplicate sha accross fileset, creating new entry"
    else:
        if len(possiblePrevFiles) > 1:
            #more than one file with this sha...
            print "INFO sha present multiple time for file : "+infos["filename"]
        file_id = web.insert('images', True, **simple_infos)
    for index in multiple_infos.keys():
        #store the value in its table
        for value in multiple_infos[index]:
            try:
                value_id = web.insert(index+'s', True, **{"value" : value})
            except:
                #TODO should be IntegrityError for mysql but not sure how best
                #integrate that without breaking the DB abstraction...
                #but if the error wasn't an IntegrityError then the next line should fail
                value_id = web.query('select id from %ss where value = "%s"' % (index, value))[0].id
            #store the relationship between the value and the file
            try:
                web.insert("images_"+index+'s', False, **{index+"_id": value_id, "image_id" : file_id})
            except Exception, inst:
                #if we are updating a file we might raise some integrity error here
                if updatingFile:
                    pass
                else:
                    raise inst
def read():
    """returns a user storage object associated with the session

    Resolution order based on the _SID_ cookie:
    no cookie -> brand-new anonymous user with a fresh sid;
    cookie + users/sessions join row -> that user (with roles attached
    when uid > 0); cookie + sessions row only -> anonymous user keeping
    the same sid; no matching rows at all -> fresh anonymous user.
    (Python 2 code: print statements.)
    """
    cookies = web.cookies()
    if not hasattr(cookies,'_SID_'):
        # this is the case of first time visitors and clients that
        # don't store cookies (eg. web crawlers).
        print 'not capable of cookies or a brand new user, so i am adding one'
        new_sid = _generate_id()
        user = mod.user.anonymous_user(sid=new_sid)
        return user
    #Otherwise, if the session is still active, we have a record of the client's session in the database.
    sid = cookies._SID_
    query = web.query("SELECT u.*, s.* FROM users u INNER JOIN sessions s \
        ON u.uid = s.uid WHERE s.sid = $sid", vars=locals())
    # Debug echo of the rendered SQL (left over from development).
    print web.query("SELECT u.*, s.* FROM users u INNER JOIN sessions s \
        ON u.uid = s.uid WHERE s.sid = $sid", vars=locals(), _test=True)
    try:
        user = query[0]
    except IndexError:
        # most likely this means they have a cookie, but are anonymous.
        # so we'll make an anonymous user using their sid
        # or their cookie could have expired or something
        query = web.query("SELECT * FROM sessions s \
            WHERE s.sid = $sid", vars=locals())
        try:
            query[0]
            user = mod.user.anonymous_user(sid=sid)
        except IndexError:
            # their cookie is f****d up, assume they are first time visitors
            print 'f****d up cookie, assuming first time visitor'
            new_sid = _generate_id()
            user = mod.user.anonymous_user(sid=new_sid)
            return user
    if user.uid > 0:
        # they are an authenticated user
        # Add roles to user
        user.roles = {glbl.constant.authenticated_role_id:'authenticated user'}
        result = web.query("SELECT r.rid, r.name FROM role r INNER JOIN users_roles ur \
            ON ur.rid = r.rid WHERE ur.uid = $user.uid", vars=locals())
        for role in result:
            user.roles[role.rid] = role.name
    else:
        # they are anonymous, so make sure they keep the sid from the cookie and not from the
        # users table that was queried to make the user object
        user = mod.user.anonymous_user(sid=sid)
    # They have a session id in the db, so we set that before
    # returning the user.
    user.session_in_db = True
    return user
def tags_remove(point, tags):
    """Detach each tag in *tags* from *point*, matching on tag text and
    namespace."""
    for tag in tags:
        web.query("""
            DELETE FROM locations_users_tags
            USING locations_users_tags, tags
            WHERE tags.id = locations_users_tags.tag_id
            AND location_id = $point_id
            AND tags.tag = $tag
            AND tags.tag_namespace = $tag_namespace;
            """, vars=dict(point_id=point.id,
                           tag=tag.tag,
                           tag_namespace=tag.tag_namespace))
def unset_policies(object, user, roles, adder_user):
    """Remove every role in *roles* held by *user* on *object*.

    NOTE: *adder_user* is part of the call signature but not used here.
    """
    for role in roles:
        web.query(
            """
            DELETE FROM locations_policy_table
            WHERE user_id = $user_id AND location_id=$location_id AND role=$role
            ;""",
            vars=dict(user_id=user.id, location_id=object.id, role=role))
def profile_update(user, profile):
    """Overwrite the user's profile: for each (key, value) in *profile*,
    delete the old row then insert the new value."""
    for prop_key, prop_value in profile.items():
        web.query("""
            DELETE FROM user_profiles
            WHERE user_id = $user_id AND prop_key=$prop_key;
            """, vars=dict(user_id=user.id, prop_key=prop_key))
        web.query("""
            INSERT INTO user_profiles (user_id, prop_key, prop_value)
            VALUES ($user_id, $prop_key, $prop_value);
            """, vars=dict(user_id=user.id, prop_key=prop_key,
                           prop_value=prop_value))
def hide_project(project):
    """Soft-delete a project: hide its locations row and drop its user
    links (the latter keeps the member count correct)."""
    params = project.copy()
    web.query("""
        UPDATE locations SET visible = 0
        WHERE id = $id ;
        """, vars=params)
    # quick fix: otherwise count is broken!!!
    web.query("""
        DELETE FROM project_users
        WHERE project_id = $id
        """, vars=params)
def __delitem__(self, key):
    """Remove a profile property from the DB and from the local cache.

    Non-tuple keys are normalized to (lang=None, prop_key); a key absent
    from the cache is silently ignored.
    """
    if not isinstance(key, tuple):
        key = None, key
    web.query("""DELETE FROM profile
        WHERE lang=$lang AND prop_key=$prop_key;""",
              vars=dict(
                  lang=key[0],
                  prop_key=key[1],
              ))
    # FIX: narrowed the bare `except:` — deleting from a dict can only
    # fail with KeyError, and the bare form hid real errors.
    try:
        del self.__dict__["_data"][key]
    except KeyError:
        pass  # no problem if the key is not there
def login(email='',user='',remember_me=''):
    """Record a login for the user with *email*.

    Stamps the login time and remember_me flag on the users row, then
    regenerates the session carrying the user's uid.

    TODO: implement login by username in addition to email.
    """
    r = '1' if remember_me else '0'
    login_time = int(time.time())
    user = web.select('users', where='email = $email', vars=locals())[0]
    web.transact()
    web.query("UPDATE users SET login = $login_time, remember_me = $r \
        WHERE uid = $user.uid", vars=locals())
    inc.session.regenerate(uid=user.uid)
    web.commit()
def note_profile_update(object, profile):
    """Write the note's profile dict: delete each key's old row, then
    insert the new value.  A value of None means delete-only."""
    for k, v in profile.items():
        # FIX: the DELETE used the placeholder $location_id while the vars
        # dict only supplies note_id, so the query could never be rendered.
        web.query("""
            DELETE FROM note_profiles
            WHERE note_id = $note_id AND prop_key=$prop_key;
            """, vars=dict(note_id=object.id, prop_key=k,))
        if v is not None:
            web.query("""
                INSERT INTO note_profiles (note_id, prop_key, prop_value)
                VALUES ($note_id, $prop_key, $prop_value);
                """, vars=dict(note_id=object.id, prop_key=k, prop_value=v))
def hide_point(point):
    """Soft-delete a point: hide its locations row and every project link."""
    params = point.copy()
    web.query("""
        UPDATE locations SET visible = 0
        WHERE id = $id ;
        """, vars=params)
    web.query("""
        UPDATE projects_points SET visible = 0
        WHERE location_id = $id ;
        """, vars=params)
def delete_point(point):
    """Hard-delete a point: remove its locations row plus all dependent
    rows (project links, tag links, notes)."""
    params = point.copy()
    web.query("""
        DELETE FROM locations WHERE id = $id ;
        """, vars=params)
    web.query("""
        DELETE FROM projects_points WHERE location_id = $id ;
        """, vars=params)
    web.query("""
        DELETE FROM locations_users_tags WHERE location_id = $id ;
        """, vars=params)
    web.query("""
        DELETE FROM notes WHERE location_id = $id ;
        """, vars=params)
def sync():
    """Reconcile the images table with the files actually on disk.

    Files present on disk but missing from the DB are (re)indexed via
    build_all(); DB rows with no backing file are removed together with
    their multi-value index links, after which orphaned index values are
    purged.  (Python 2 code: print statements.)
    """
    #list all files from disk and DB
    disk_entries = get_files()
    db_entries = [f['filename'] for f in web.query("select filename from images")]
    extra_db_entries = []
    print "%s disk entries\n%s db entries\n" % (len(disk_entries), len(db_entries))
    # After this loop disk_entries holds only files unknown to the DB and
    # extra_db_entries only DB rows with no file on disk.
    for f in db_entries:
        if f in disk_entries:
            disk_entries.remove(f)
        else:
            extra_db_entries.append(f)
    print "%s EXTRA disk entries" % (len(disk_entries))
    build_all(disk_entries, extra_db_entries)
    print "\n%s EXTRA db entries" % (len(extra_db_entries))
    for f in extra_db_entries:
        print "removing %s from DB" % f
        #remove the info from the db
        # NOTE(review): filename is interpolated unescaped into SQL; a name
        # containing a double quote would break this — confirm upstream.
        file_id = web.query('select id from images where filename = "%s"' % f)[0].id
        web.query("delete from images where id = %s" % file_id)
        for imv in config.have_many_values:
            #removing the infos from the various multiple indexes
            web.query("delete from images_%ss where image_id = %s" % (imv, file_id))
    for imv in config.have_many_values:
        print "cleaning the possibles orphan values for the index %s" % imv
        web.query("delete from %ss where id not in (select %s_id from images_%ss)" % (imv, imv, imv))
def GET(self, provider=None, id=None):
    """Render map data: every place plus a bounding box.

    With no provider, the bbox covers all places; otherwise it is the
    bbox of the single (provider, id) place.  (Python 2 code: print
    statement at the end.)
    """
    ret = []
    for i in web.query(self.SQL_PLACES):
        # rePoint pulls two float groups out of astext — presumably a WKT
        # POINT giving coordinates; TODO confirm against SQL_PLACES.
        m = self.rePoint.match(i.astext)
        ret.append([int(i.id), i.provider, i.providerid,
                    float(m.group(1)), float(m.group(2)), i.state])
    if provider == None:
        a = web.query(self.SQL_BBOX)[0].the_geom
    else:
        a = web.query("%s WHERE provider = %s and providerid=%s" %
                      (self.SQL_BBOX, web.db.sqlquote(provider),
                       web.db.sqlquote(id)))[0].the_geom
    # rePolygon splits the bbox geometry into corner points; corners 0 and
    # 2 are opposite corners of the rectangle.
    bound = self.rePolygon.match(a).group(1).split(',')
    print render.map_data(ret, bound[0].split(' '), bound[2].split(' '))
def note_profile(object):
    """Load the note's profile rows as a {prop_key: prop_value} dict."""
    rows = web.query("""
        SELECT * FROM note_profiles WHERE note_id = $note_id;
        """, vars=dict(note_id=object.id))
    return dict((p.prop_key, p.prop_value) for p in rows)
def update_projects_points(project, point):
    """Link *point* to *project* in projects_points unless a visible link
    already exists.

    Points without an id attribute are resolved by uuid; if the uuid is
    unknown the link is written with location_id 0 (legacy behavior,
    flagged !!! by the original author).
    """
    project_id = project.id
    if hasattr(point, "id"):
        point_id = point.id
    else:
        db_point = list(point_by_uuid(point.uuid))
        # !!! should we update lat, lon too?
        point_id = db_point[0].id if db_point else 0  # !!!
    exists = web.query("""
        SELECT * FROM projects_points
        WHERE location_id=$point_id AND project_id=$project_id
        AND projects_points.visible = 1 LIMIT 1;
        """, vars=locals())
    if not exists:
        web.insert("projects_points",
                   location_id=point_id,
                   project_id=project_id,
                   visible=project.visible and getattr(point, "visible", 1))
def get_modified_pages(self, url, user_id):
    """Return pages on *url*'s site whose newest revision is later than
    the user's last-reviewed revision (or that were never reviewed)."""
    site_id = core.db.get_site_id(url)  # @@ improve later
    pages = web.query(
        """
        SELECT page.id as id, page.path as path,
               MAX(version.revision) as revision,
               MAX(review.revision) as reviewed_revision
        FROM page
        JOIN version ON page.id = version.page_id
        LEFT OUTER JOIN review
             ON page.id = review.page_id AND review.user_id=$user_id
        GROUP BY page.id, page.path
        """,
        vars=locals(),
    )
    return [p for p in pages
            if not p.reviewed_revision or p.revision > p.reviewed_revision]
def get_thing(id):
    """Fetch the live key/value pairs of thing *id* as a dict.

    Reads datum rows at the sentinel end_revision (i.e. the current
    revision), excluding the 'type' key.
    """
    sql = ("select key, value from datum where thing_id=%d "
           "and end_revision=2147483647 and key != 'type'" % id)
    # FIX: the result variable shadowed the builtin `iter`; also replaced
    # the manual accumulation loop with a dict comprehension.
    return {row.key: row.value for row in web.query(sql)}
def user_by_location_role(location):
    """Return the users holding any policy role on *location*."""
    sql = ("""SELECT """ + USERS_GROUP_BY + """
        FROM users, locations_policy_table
        WHERE locations_policy_table.user_id = users.id
        AND locations_policy_table.location_id=$location_id
        GROUP BY """ + USERS_GROUP_BY + """;""")
    return web.query(sql, vars=dict(location_id=location.id))
def projects_by_point_and_tags(point, tag_list):
    """Return visible projects that contain *point* and match *tag_list*,
    newest-commented first.

    Raises ValueError when *tag_list* is empty.
    """
    # union!!!
    if not tag_list:
        # FIX: the original raised a plain string ("invalid arguments"),
        # which is a TypeError in Python 3 and deprecated in 2.x.
        raise ValueError("invalid arguments")
    point_id = point.id
    return web.query("""
        SELECT locations.* FROM (
        """ + BASE_PROJECTS_QUERY + """
        LEFT JOIN projects_points as pp
            ON (pp.project_id = locations.id AND pp.visible = 1)
        WHERE locations.visible = 1
        AND locations.type = 'project'
        AND pp.location_id = $point_id
        GROUP BY """ + LOCATIONS_GROUP_BY() + """, users.username
        ORDER BY last_comment DESC
        ) as locations, tags, locations_users_tags
        WHERE locations_users_tags.location_id = locations.id
        AND """ + _has_tags(tag_list) + """
        AND locations_users_tags.tag_id = tags.id
        GROUP BY """ + LOCATIONS_GROUP_BY() + """
        """, vars=dict(point_id=point_id))
def points_nearby(lat, lon, radius=None, limit=None, project=None):
    """Visible points near (lat, lon), newest-commented first.

    *radius* (optional) restricts results by distance, approximated as a
    scaled euclidean formula using meters-per-degree factors at this
    latitude; *limit* caps the row count; *project* restricts to points
    visibly linked to that project.
    """
    limit = limit and ("LIMIT %i" % limit) or ""
    radius_cond = radius and (
        """ AND
        sqrt(pow($lat - lat, 2) * $y + pow($lon - lon, 2) * $x) < $r
        """
    ) or ""
    # x, y: meters per degree of longitude / latitude at this location;
    # squared below so the SQL distance works in meters.
    x, y = geo_support.meters_per_deg(lat, lon)
    if project:
        project_id = project.id
        qry = BASE_PROJECTS_QUERY + """
            LEFT JOIN projects_points
                ON (projects_points.location_id = locations.id
                    AND projects_points.visible = 1)
            WHERE locations.visible = 1
            AND locations.type = 'point'
            """ + radius_cond + """
            AND projects_points.project_id = """ + str(
                int(project_id)) + """
            GROUP BY """ + LOCATIONS_GROUP_BY() + """, users.username
            ORDER BY last_comment DESC
            """ + limit
    else:
        qry = BASE_LOCATIONS_QUERY + """
            WHERE locations.visible = 1
            AND locations.type = 'point'
            """ + radius_cond + """
            GROUP BY """ + LOCATIONS_GROUP_BY() + """, users.username
            ORDER BY last_comment DESC
            """ + limit
    return web.query(qry, vars=dict(x=x**2, y=y**2, r=radius, lat=lat, lon=lon))
def object_by_hard_(cond, **query):
    """Fetch at most one location matching the raw SQL condition *cond*.

    Keyword arguments are passed through as query vars.  This may be
    obsoleted anytime.
    """
    sql = (BASE_LOCATIONS_QUERY
           + """WHERE """ + cond
           + """ GROUP BY """ + LOCATIONS_GROUP_BY()
           + """, users.username LIMIT 1; """)
    return web.query(sql, vars=query)
def get(self):
    """Build the template context for one page of stories.

    Reads up to stories_per_page rows (one extra row signals a following
    page), decorates each story with its host and a human-readable age,
    and returns the stories plus prev/next pagination links.
    """
    rows = web.query(self._story_query())
    stories = []
    next_page = False
    for idx, story in enumerate(rows):
        if idx >= config.stories_per_page:
            # The extra row past the page size only proves more exist.
            next_page = True
            break
        story.host = get_nice_host(story['url'])
        story.niceago = web.datestr(
            datetime.fromtimestamp(story['date_reddit']), datetime.now())
        stories.append(story)
    prev_page = self.page != 1
    next_page_link = (self.next_page(self.subreddit, self.page)
                      if next_page else None)
    prev_page_link = (self.prev_page(self.subreddit, self.page)
                      if prev_page else None)
    return {
        'stories': stories,
        'next_page': next_page,
        'prev_page': prev_page,
        'next_page_link': next_page_link,
        'prev_page_link': prev_page_link,
    }