def GET(self, format=None):
    """Look up districts/politicians from a free-form query.

    The query (?q=, falling back to ?zip=) may be a ZIP+4 ("12345-6789"),
    a 5-digit ZIP (optionally disambiguated by ?address=), a district code
    like "CA-12", or a politician name.  With no query at all, renders the
    full district list.
    """
    i = web.input(address=None)
    pzip5 = re.compile(r'\d{5}')
    pzip4 = re.compile(r'\d{5}-\d{4}')
    pdist = re.compile(r'[a-zA-Z]{2}\-\d{2}')
    dists = None
    if not i.get('q'):
        i.q = i.get('zip')
    if i.q:
        if pzip4.match(i.q):
            zip, plus4 = i.q.split('-')
            dists = [x.district_id for x in
                     db.select('zip4', where='zip=$zip and plus4=$plus4',
                               vars=locals())]
        elif pzip5.match(i.q):
            try:
                dists = zip2rep.zip2dist(i.q, i.address)
            except zip2rep.BadAddress:
                return render.find_badaddr(i.q, i.address)
        if dists:
            d_dists = list(schema.District.select(where=web.sqlors('name=', dists)))
            out = apipublish.publish(d_dists, format)
            if out:
                return out
            if len(dists) == 1:
                raise web.seeother('/us/%s' % dists[0].lower())
            elif len(dists) == 0:
                return render.find_none(i.q)
            else:
                return render.find_multi(i.q, d_dists)
        if pdist.match(i.q):
            raise web.seeother('/us/%s' % i.q)
        # Not a ZIP or district code: fall back to a politician search.
        results = se.query(i.q)
        reps = schema.Politician.select(where=web.sqlors('id=', results))
        if len(reps) > 1:
            return render.find_multi_reps(reps, congress_ranges)
        else:
            try:
                rep = reps[0]
                # BUG FIX: web.seeother must be *raised* to redirect; the
                # exception object was previously constructed and discarded,
                # so the handler fell through and returned None.
                raise web.seeother('/p/%s' % rep.id)
            except IndexError:
                raise web.notfound()
    else:
        # No query at all: list every district with its current politician(s).
        index = list(schema.District.select(order='name asc'))
        for i in index:  # NOTE: reuses (shadows) the web.input() name `i`
            i.politician = list(db.select('curr_politician',
                                          where='district_id = $i.name',
                                          vars=locals()))
        out = apipublish.publish(index, format)
        if out:
            return out
        return render.districtlist(index)
def findPol(raw_name):
    """Resolve a free-form politician name to a unique politician id.

    Returns the id only when exactly one row matches; otherwise returns
    None implicitly.
    """
    tokens = cleanName(raw_name).replace(',', '').split(' ')
    tokens = [t.lower() for t in tokens if t]
    # Any token may be the last name, AND any token the first or nick name.
    clause = (web.sqlors('LOWER(lastname)=', tokens)
              + ' AND (' + web.sqlors('LOWER(firstname)=', tokens)
              + ' OR ' + web.sqlors('LOWER(nickname)=', tokens) + ')')
    matches = db.select('politician', where=clause).list()
    #print raw_name, "-->", tokens
    if matches and len(matches) == 1:
        return matches[0].id
def process(c, ordering_func=None):
    # Translate one query condition `c` into SQL WHERE fragments and append
    # them to the enclosing `wheres` list (closure variable); may also
    # register the thing table in the closure's `tables` dict.
    # ordering_func is used when the query contains embeddable objects
    #
    # example: {'links': {'title: 'foo', 'url': 'http://example.com/foo'}}
    if c.datatype == 'ref':
        metadata = self.get_metadata(c.value)
        if metadata is None:
            # required object is not found so the query result will be empty.
            # Raise StopIteration to indicate empty result.
            raise StopIteration
        c.value = metadata.id
    if c.op == '~':
        # Wildcard operator: '*' maps to SQL LIKE's '%'; literal underscores
        # are escaped so they don't act as single-char wildcards.
        op = Literal('LIKE')
        c.value = c.value.replace('*', '%').replace('_', r'\_')
    else:
        op = Literal(c.op)
    if c.key in ['key', 'type', 'created', 'last_modified']:
        #@@ special optimization to avoid join with thing.type when there are non-common properties in the query.
        #@@ Since type information is already present in property table,
        #@@ getting property id is equivalent to join with type.
        if c.key == 'type' and type_required:
            return
        if isinstance(c.value, list):
            q = web.sqlors('thing.%s %s ' % (c.key, op), c.value)
        else:
            q = web.reparam('thing.%s %s $c.value' % (c.key, op), locals())
        xwheres = [q]
        # Add thing table explicitly because get_table is not called
        tables['_thing'] = DBTable("thing")
    else:
        table = get_table(c.datatype, c.key)
        # NOTE(review): `type` here is a closure variable (not the builtin) —
        # confirm it carries the type id expected by get_property_id.
        key_id = self.get_property_id(type, c.key)
        if not key_id:
            # No such property on this type: empty result.
            raise StopIteration
        q1 = web.reparam('%(table)s.key_id=$key_id' % {'table': table}, locals())
        if isinstance(c.value, list):
            q2 = web.sqlors('%s.value %s ' % (table, op), c.value)
        else:
            q2 = web.reparam('%s.value %s $c.value' % (table, op), locals())
        xwheres = [q1, q2]
        if ordering_func:
            xwheres.append(ordering_func(table))
    wheres.extend(xwheres)
def delete(self, domain, mails=[]):
    """Delete mail aliases `mails` under `domain` (dbmail backend).

    Removes the alias records and drops the deleted addresses from the
    domain's defaultuseraliases list.  Returns (True,) on success or
    (False, reason) on failure.
    """
    self.domain = str(domain)
    if not iredutils.isDomain(self.domain):
        return (False, 'INVALID_DOMAIN_NAME')
    if not isinstance(mails, (list, tuple,)):
        return (False, 'INVALID_MAIL')
    # Keep only syntactically valid addresses that belong to this domain.
    self.mails = [str(v).lower()
                  for v in mails
                  if iredutils.isEmail(v) and str(v).endswith('@' + self.domain)
                  ]
    # Remove alias from domain.defaultuseraliases.
    # Get domain profile.
    domainLib = domainlib.Domain()
    qr = domainLib.simpleProfile(domain=self.domain,
                                 columns=['domain', 'defaultuseraliases', ])
    if qr[0] is True:
        self.domainProfile = qr[1]
    else:
        # Propagate the (False, reason) tuple from simpleProfile.
        return qr
    self.defaultUserAliases = self.domainProfile.defaultuseraliases.split(',')
    # Remove from domain.defaultuseraliases.
    self.newDefaultAliases = [str(v).lower()
                              for v in self.defaultUserAliases
                              if v not in self.mails
                              ]
    # Delete domain and related records.
    try:
        self.conn.delete('dbmail_aliases_extra',
                         where='%s' % web.sqlors('alias = ', self.mails),)
        self.conn.delete('dbmail_aliases',
                         where='%s' % web.sqlors('alias = ', self.mails),)
        self.conn.update('dbmail_domains',
                         vars={'domain': self.domain, },
                         defaultuseraliases=','.join(self.newDefaultAliases),
                         modified=iredutils.getGMTTime(),
                         where='domain = $domain',
                         )
        web.logger(
            msg="Delete mail alias: %s." % ', '.join(self.mails),
            domain=self.domain,
            event='delete',
        )
        return (True,)
    except Exception, e:
        return (False, str(e))
def process(c, ordering_func=None):
    # Compile one query condition `c` into SQL WHERE fragments, appending
    # them to the enclosing `wheres` list (closure variable).
    # ordering_func is used when the query contains embeddable objects
    #
    # example: {'links': {'title: 'foo', 'url': 'http://example.com/foo'}}
    if c.datatype == 'ref':
        metadata = self.get_metadata(c.value)
        if metadata is None:
            # required object is not found so the query result will be empty.
            # Raise StopIteration to indicate empty result.
            raise StopIteration
        c.value = metadata.id
    if c.op == '~':
        # Wildcard operator: '*' maps to SQL LIKE's '%'.
        # NOTE(review): unlike the sibling variant of this function, '_' is
        # NOT escaped here, so a literal underscore still acts as a
        # single-char LIKE wildcard — confirm whether that is intended.
        op = Literal('LIKE')
        c.value = c.value.replace('*', '%')
    else:
        op = Literal(c.op)
    if c.key in ['key', 'type', 'created', 'last_modified']:
        #@@ special optimization to avoid join with thing.type when there are non-common properties in the query.
        #@@ Since type information is already present in property table,
        #@@ getting property id is equivalent to join with type.
        if c.key == 'type' and type_required:
            return
        if isinstance(c.value, list):
            q = web.sqlors('thing.%s %s ' % (c.key, op), c.value)
        else:
            q = web.reparam('thing.%s %s $c.value' % (c.key, op), locals())
        xwheres = [q]
        # Add thing table explicitly because get_table is not called
        tables['_thing'] = DBTable("thing")
    else:
        table = get_table(c.datatype, c.key)
        key_id = self.get_property_id(type_id, c.key)
        if not key_id:
            # No such property on this type: empty result.
            raise StopIteration
        q1 = web.reparam('%(table)s.key_id=$key_id' % {'table': table}, locals())
        if isinstance(c.value, list):
            q2 = web.sqlors('%s.value %s ' % (table, op), c.value)
        else:
            q2 = web.reparam('%s.value %s $c.value' % (table, op), locals())
        xwheres = [q1, q2]
        if ordering_func:
            xwheres.append(ordering_func(table))
    wheres.extend(xwheres)
def get_metadata_list(self, keys):
    """Fetch the thing rows for `keys` on this site, as a {key: row} dict."""
    clause = web.reparam('site_id=$self.site_id', locals())
    clause += web.sqlors('key=', keys)
    rows = self.db.select('thing', what='*', where=clause).list()
    return dict((row.key, row) for row in rows)
def findPol(raw_name):
    # Map a free-form politician name to a unique politician id.
    # Returns None implicitly when zero or multiple rows match.
    name = cleanName(raw_name).replace(',','').split(' ')
    # Drop empty tokens and lowercase the rest (Python 2 map returns a list).
    name = map(string.lower,filter(lambda x: x, name))
    # Any token may match the last name AND any token the first/nick name.
    p = db.select('politician',
                  where=web.sqlors('LOWER(lastname)=',name) + ' AND (' +
                        web.sqlors('LOWER(firstname)=',name)+' OR '+
                        web.sqlors('LOWER(nickname)=',name)+')').list()
    #print raw_name, "-->", name
    if p and len(p) == 1: return p[0].id
def GET(self, format=None):
    # District lookup by ZIP (with optional ?address= disambiguation), or —
    # when no ZIP is given — the full district list, optionally published
    # through the API in `format`.
    i = web.input(address=None)
    # Join districts with their politicians in a single select.
    join = ['district' + ' LEFT OUTER JOIN politician '
            'ON (politician.district = district.name)']
    if i.get('zip'):
        try:
            dists = zip2rep.zip2dist(i.zip, i.address)
        except zip2rep.BadAddress:
            return render.find_badaddr(i.zip, i.address)
        if len(dists) == 1:
            raise web.seeother('/us/%s' % dists[0].lower())
        elif len(dists) == 0:
            return render.find_none(i.zip)
        else:
            # ZIP spans several districts: show them all.
            dists = db.select(join, where=web.sqlors('name=', dists))
            return render.find_multi(i.zip, dists)
    else:
        out = apipublish.publish([{
            'uri': 'http://watchdog.net/us/' + x.name.lower(),
            'type': 'District',
            'name': x.name,
            'state': x.state,
            'district': x.district,
            'voting': x.voting,
            'wikipedia': apipublish.URI(x.wikipedia)
        } for x in db.select('district')], format)
        # publish() returns False when `format` is not an API format.
        if out is not False:
            return out
        dists = db.select(join, order='name asc')
        return render.districtlist(dists)
def load_election_results(d, distname): (year, votes, vote_pct) = (0,'0','0') if 'name' not in d: print "No name for the congress person for: ",distname return pname = d['name'].lower() if 'electionresults' not in d: print "No election results for %s repsenting %s." % (d['name'],distname) return for e in d['electionresults']: if 'candidate' in e and 'primary' not in e['election'] and \ pname.replace(' ','') in e['candidate'].lower().replace(' ',''): if int(e['election'][0:4]) > year: (year,votes) = (int(e['election'][0:4]), e['totalvotes']) if 'percent' in e: vote_pct = e['percent'] #print year, votes, vote_pct, d['name'], distname if year: pol=db.select('politician', what='id', where="district_id='"+distname+"' AND "+web.sqlors('lastname ilike ',pname.split(' ')), vars=locals()).list() if pol and len(pol)==1: polid=pol[0].id db.update('politician', where='id=$polid', n_vote_received=votes.replace(',','').replace('Unopposed','0'), pct_vote_received=vote_pct.replace('%',''), last_elected_year=year, vars=locals()); else: print "Couldn't find an id for %s representing %s." % (d['name'], distname) else: print "Didn't find a recent election for %s representing %s." %(d['name'], distname) #, pformat(d['electionresults'])
def calculate_per_capita():
    """Compute each earmark's per-capita cost and aggregate it per district."""
    print "Pre getting all populations per district..."
    pop_dist = {}
    for d in schema.District.select(where='est_population is not null'):
        pop_dist[d.name] = d.est_population
    print "Calculate the per-capita impact of each earmark..."
    pc = {}  # earmark id -> dollars per sponsored-constituent
    for e in db.select('earmark', what='final_amt, id', order='id asc'):
        done_states = set()
        amount = float(e.final_amt or 0)
        pop = 0
        sponsors = db.query(
            "select district_id, state_id, id from politician, district, earmark_sponsor where politician.district_id = district.name and earmark_sponsor.politician_id = politician.id and earmark_id=$e.id",
            vars=locals()).list()
        if not sponsors:
            continue
        # Get the population for each district sponsoring
        for p in sponsors:
            if p.district_id != p.state_id:
                done_states.add(p.state_id)
                pop += pop_dist.get(p.district_id, 0)
        # Get the population for state sponsoring unless a district has from
        # within state also sponsors.
        for p in sponsors:
            if p.district_id == p.state_id:
                if p.state_id in done_states:
                    continue
                done_states.add(p.state_id)
                pop += pop_dist.get(p.district_id, 0)
        if not pop:
            pc[e.id] = 0.0
        else:
            pc[e.id] = amount / pop
        #print e.id, pc[e.id], amount, pop
    print "Aggregating per-capita impact to districts..."
    for d in schema.District.select():
        if d.name == d.state_id:
            continue  # Don't set for states.
        congress_people = set()
        # NOTE(review): '$d.state.code' assumes District rows expose
        # .state.code — confirm; the code above compares d.state_id instead.
        senators = db.select('curr_politician',
                             where='district_id = $d.state.code', vars=locals())
        if senators:
            congress_people.update(p.id for p in senators)
        politician = db.select('curr_politician',
                               where='district_id = $d.name', vars=locals())
        if politician:
            congress_people.update(p.id for p in politician)
        ems = db.select('earmark_sponsor', what='distinct(earmark_id)',
                        where=web.sqlors('politician_id=', congress_people))
        empc = sum(
            map(lambda x: pc.get(x, 0.0), set(e.earmark_id for e in ems)))
        #print d.name, empc
        db.update('district', where='name=$d.name', earmark_per_capita=empc,
                  vars=locals())
def delete(self, domains=[]): if not isinstance(domains, list): return (False, 'INVALID_DOMAIN_NAME') domains = [str(v).lower() for v in domains if iredutils.is_domain(v)] if not domains: return (True, ) sql_vars = { 'domains': domains, } # Delete domain and related records. try: self.conn.delete( 'domain', vars=sql_vars, where='domain IN $domains', ) self.conn.delete( 'alias_domain', vars=sql_vars, where='alias_domain IN $domains OR target_domain IN $domains', ) for tbl in ['alias', 'domain_admins', 'mailbox', \ 'recipient_bcc_domain', 'recipient_bcc_user', \ 'sender_bcc_domain', 'sender_bcc_user', \ ]: self.conn.delete( tbl, vars=sql_vars, where='domain IN $domains', ) # Delete real-time mailbox quota. try: self.conn.query( 'DELETE FROM used_quota WHERE %s' % \ web.sqlors('username LIKE ', ['%@' + d for d in domains]) ) except: pass for d in domains: web.logger( msg="Delete domain: %s." % (d), domain=d, event='delete', ) return (True, ) except Exception, e: return (False, str(e))
def query_census(location, hr_keys):
    """Sum census values for `hr_keys` in district `location`.

    DISTINCT collapses hr_keys that map to multiple internal keys carrying
    the same value, so each is counted once.
    """
    sql = ('SELECT SUM(value) FROM (SELECT DISTINCT value, hr_key FROM census WHERE '
           + web.sqlors('hr_key=', hr_keys)
           + ' AND district_id=' + web.sqlquote(location)
           + ') AS foo;')
    result = db.query(sql)
    if not result:
        return None
    return result[0].sum
def fix_type(type, type_id):
    # Migrate the per-type key rows from <prefix>_keys into the global
    # `property` table, rewrite key_id in every datum table accordingly,
    # then drop the property rows that turned out to be unused.
    print('fixing type', type, file=web.debug)
    prefix, multiple_types = get_table_prefix(type)
    keys_table = prefix + "_keys"
    keys = dict(
        (r.key, r.id) for r in db.query('SELECT * FROM ' + keys_table))
    newkeys = {}
    # @@ There is a chance that we may overwrite one update with another when new id is in the same range of old ids.
    # @@ Example:
    # @@     UPDATE datum_str SET key_id=4 FROM thing WHERE thing.id = property.type AND key_id=1;
    # @@     UPDATE datum_str SET key_id=6 FROM thing WHERE thing.id = property.type AND key_id=4;
    # @@ In the above example, the second query overwrites the result of first query.
    # @@ Making id of property table more than max_id of datum_keys makes sure that this case never happen.
    id1 = db.query('SELECT max(id) as x FROM ' + keys_table)[0].x
    id2 = db.query('SELECT max(id) as x FROM property')[0].x
    print('max ids', id1, id2, file=web.debug)
    if id1 > id2:
        # Bump the property id sequence past the old key ids (see @@ above).
        db.query("SELECT setval('property_id_seq', $id1)", vars=locals())
    for key in keys:
        newkeys[key] = db.insert('property', type=type_id, name=key)
    total_updated = {}
    for d in ['str', 'int', 'float', 'boolean', 'ref']:
        table = prefix + '_' + d
        print('fixing', type, table, file=web.debug)
        for key in keys:
            old_key_id = keys[key]
            new_key_id = newkeys[key]
            if multiple_types:
                # Shared datum table: restrict the rewrite to this type's rows.
                updated = db.query(
                    'UPDATE %s SET key_id=$new_key_id FROM thing WHERE thing.id = %s.thing_id AND thing.type=$type_id AND key_id=$old_key_id'
                    % (table, table),
                    vars=locals(),
                )
            else:
                updated = db.update(
                    table,
                    key_id=new_key_id,
                    where='key_id=$old_key_id',
                    vars=locals(),
                )
            total_updated[key] = total_updated.get(key, 0) + updated
            print('updated', updated, file=web.debug)
    # Any property that rewrote zero rows across all datum tables is unused.
    unused = [k for k in total_updated if total_updated[k] == 0]
    print('unused', unused, file=web.debug)
    db.delete('property', where=web.sqlors('id =', [newkeys[k] for k in unused]))
def get_related(module_id):
    """Get a similar module: one sharing at least two tags with `module_id`."""
    module_tags = [t.tag for t in tags.get_tags(module_id)]
    candidates = db.select(
        'tags',
        vars=dict(id=module_id, tags=module_tags),
        what='module_id',
        where='module_id != $id and %s' % web.sqlors("tag = ", module_tags),
        group='module_id having count(module_id) > 1',
        order='rand()')
    match = web.listget(candidates, 0, False)
    return match and get_module(match.module_id)
def delete(self, domain, mails=[]):
    """Delete dbmail users `mails` under `domain`, plus their alias records.

    Returns (True, ) on success or (False, reason) on failure.
    """
    self.domain = str(domain)
    if not iredutils.isDomain(self.domain):
        return (False, 'INVALID_DOMAIN_NAME')
    if not isinstance(mails, list):
        return (False, 'INVALID_MAIL')
    # Keep only syntactically valid addresses that belong to this domain.
    self.mails = [
        str(addr).lower()
        for addr in mails
        if iredutils.isEmail(addr) and str(addr).endswith('@' + self.domain)
    ]
    if not self.mails:
        return (False, 'INVALID_MAIL')
    # Delete domain and related records.
    try:
        # Delete from aliases.
        self.conn.delete('dbmail_aliases',
                         where='%s' % web.sqlors('deliver_to = ', self.mails))
        self.conn.delete('dbmail_aliases',
                         where='%s' % web.sqlors('alias = ', self.mails))
        # Delete user record.
        self.conn.delete('dbmail_users',
                         where='%s' % web.sqlors('userid = ', self.mails))
        web.logger(
            msg="Delete user: %s." % ', '.join(self.mails),
            domain=self.domain,
            event='delete',
        )
        return (True, )
    except Exception, e:
        return (False, str(e))
def get_related(module_id):
    """Get a similar module."""
    # A related module is one sharing at least two tags with `module_id`
    # (count(module_id) > 1 in the HAVING clause); one is picked at random.
    module_tags = [t.tag for t in tags.get_tags(module_id)]
    m = web.listget(
        db.select('tags',
                  vars=dict(id=module_id, tags=module_tags),
                  what='module_id',
                  where='module_id != $id and %s' % web.sqlors("tag = ", module_tags),
                  group='module_id having count(module_id) > 1',
                  order='rand()'),
        0, False)
    # Returns False when nothing related was found.
    return m and get_module(m.module_id)
def delete(self, domains=[]):
    """Delete `domains` and all SQL records that reference them.

    Returns (True,) on success or (False, reason) on failure.
    """
    if not isinstance(domains, list):
        return (False, 'INVALID_DOMAIN_NAME')
    # Normalize and keep only syntactically valid domain names.
    domains = [str(v).lower() for v in domains if iredutils.is_domain(v) ]
    if not domains:
        return (True, )
    sql_vars = {'domains': domains, }
    # Delete domain and related records.
    try:
        self.conn.delete('domain', vars=sql_vars, where='domain IN $domains', )
        self.conn.delete(
            'alias_domain',
            vars=sql_vars,
            where='alias_domain IN $domains OR target_domain IN $domains',
        )
        for tbl in ['alias', 'domain_admins', 'mailbox', \
                    'recipient_bcc_domain', 'recipient_bcc_user', \
                    'sender_bcc_domain', 'sender_bcc_user', \
                    ]:
            self.conn.delete(
                tbl,
                vars=sql_vars,
                where='domain IN $domains',
            )
        # Delete real-time mailbox quota (best effort; failure is ignored).
        try:
            self.conn.query(
                'DELETE FROM used_quota WHERE %s' % \
                web.sqlors('username LIKE ', ['%@' + d for d in domains])
            )
        except:
            pass
        for d in domains:
            web.logger(msg="Delete domain: %s." % (d), domain=d, event='delete',)
        return (True,)
    except Exception, e:
        return (False, str(e))
def search(query, offset=0, limit=10):
    """Search modules by tag and by free text; returns (rows, has_next)."""
    query = get_nice_query(query)
    if not query:
        return [], False

    def sqlands(left, lst):
        # AND together "<left> <item>" for every already-quoted item in lst.
        return left + (" and %s " % left).join(lst)

    q = [str(web.sqlquote(w)) for w in query.split()]
    tag_query = web.sqlors("tag = ", q)
    q = [str(web.sqlquote("%%" + w + "%%")) for w in query.split()]
    where = []
    for c in ["title", "url", "description", "author"]:
        where.append(sqlands("%s like " % c, q))
    text_query = " or ".join(where)
    params = {
        "tag_query": tag_query,
        "text_query": text_query,
        "offset": offset,
        "limit": limit + 1,  # fetch one extra row to detect a next page
        # NOTE(review): len(query) is the length of the query *string*, yet it
        # gates "having count(t.module_id) >= %(size)d" — confirm this wasn't
        # meant to be len(query.split()).
        "size": len(query),
    }
    # Union of (1) modules matching every tag and (2) modules whose text
    # columns match each word, ordered by votes then recency.
    m = list(
        db.query(
            " (select distinct m.id, title, url, description, author, screenshot, calculated_vote as votes, m.datetime_created as dates from modules as m left join tags as t on m.id = t.module_id where %(tag_query)s group by t.module_id having count(t.module_id) >= %(size)d) union (select distinct m.id, title, url, description, author, screenshot, calculated_vote as votes, m.datetime_created as dates from modules as m where %(text_query)s order by calculated_vote desc, datetime_created desc) order by votes desc, dates desc limit %(limit)d offset %(offset)d"
            % params
        )
    )
    has_next = len(m) > limit
    return m[:limit], has_next
def calculate_per_capita():
    """Compute each earmark's per-capita cost and aggregate it per district."""
    print "Pre getting all populations per district..."
    pop_dist = {}
    for d in schema.District.select(where='est_population is not null'):
        pop_dist[d.name] = d.est_population
    print "Calculate the per-capita impact of each earmark..."
    pc = {}  # earmark id -> dollars per sponsored-constituent
    for e in db.select('earmark', what='final_amt, id', order='id asc'):
        done_states = set()
        amount = float(e.final_amt or 0)
        pop = 0
        sponsors = db.query("select district_id, state_id, id from politician, district, earmark_sponsor where politician.district_id = district.name and earmark_sponsor.politician_id = politician.id and earmark_id=$e.id",vars=locals()).list()
        if not sponsors:
            continue
        # Get the population for each district sponsoring
        for p in sponsors:
            if p.district_id != p.state_id:
                done_states.add(p.state_id)
                pop += pop_dist.get(p.district_id, 0)
        # Get the population for state sponsoring unless a district has from
        # within state also sponsors.
        for p in sponsors:
            if p.district_id == p.state_id:
                if p.state_id in done_states:
                    continue
                done_states.add(p.state_id)
                pop += pop_dist.get(p.district_id, 0)
        if not pop:
            pc[e.id] = 0.0
        else:
            pc[e.id] = amount / pop
        #print e.id, pc[e.id], amount, pop
    print "Aggregating per-capita impact to districts..."
    for d in schema.District.select():
        if d.name == d.state_id:
            continue  # Don't set for states.
        congress_people = set()
        # NOTE(review): '$d.state.code' assumes District rows expose
        # .state.code — confirm; the code above compares d.state_id instead.
        senators = db.select('curr_politician',
                             where='district_id = $d.state.code', vars=locals())
        if senators:
            congress_people.update(p.id for p in senators)
        politician = db.select('curr_politician',
                               where='district_id = $d.name', vars=locals())
        if politician:
            congress_people.update(p.id for p in politician)
        ems = db.select('earmark_sponsor', what='distinct(earmark_id)',
                        where=web.sqlors('politician_id=',congress_people))
        empc = sum(map(lambda x: pc.get(x, 0.0), set(e.earmark_id for e in ems)))
        #print d.name, empc
        db.update('district',where='name=$d.name',earmark_per_capita=empc,
                  vars=locals())
def search(query, offset=0, limit=10):
    """Search modules by tag and by free text; returns (rows, has_next)."""
    query = get_nice_query(query)
    if not query:
        return [], False

    def sqlands(left, lst):
        # AND together "<left> <item>" for every already-quoted item in lst.
        return left + (' and %s ' % left).join(lst)

    q = [str(web.sqlquote(w)) for w in query.split()]
    tag_query = web.sqlors('tag = ', q)
    q = [str(web.sqlquote('%%' + w + '%%')) for w in query.split()]
    where = []
    for c in ['title', 'url', 'description', 'author']:
        where.append(sqlands('%s like ' % c, q))
    text_query = ' or '.join(where)
    params = {
        'tag_query': tag_query,
        'text_query': text_query,
        'offset': offset,
        'limit': limit + 1,  # one extra row to detect a next page
        # NOTE(review): len(query) measures the query *string*, but feeds
        # "having count(t.module_id) >= %(size)d" — confirm it wasn't meant
        # to be the number of words.
        'size': len(query)
    }
    # Union of tag matches (all tags present) and free-text matches,
    # ordered by votes then recency.
    m = list(db.query(
        ' (select distinct m.id, title, url, description, author, screenshot, calculated_vote as votes, m.datetime_created as dates from modules as m left join tags as t on m.id = t.module_id where %(tag_query)s group by t.module_id having count(t.module_id) >= %(size)d) union (select distinct m.id, title, url, description, author, screenshot, calculated_vote as votes, m.datetime_created as dates from modules as m where %(text_query)s order by calculated_vote desc, datetime_created desc) order by votes desc, dates desc limit %(limit)d offset %(offset)d' \
        % params))
    has_next = len(m) > limit
    return m[:limit], has_next
def query(category, olid, offset=0, limit=10):
    """Select non-deleted covers in `category`, optionally filtered by olid.

    `olid` may be a single id, a list of ids, or None (no olid filter).
    """
    category_id = get_category_id(category)
    if isinstance(olid, list):
        where = web.reparam('deleted=false AND category_id = $category_id AND ',
                            locals()) + web.sqlors('olid=', olid)
    elif olid is None:
        where = web.reparam('deleted=false AND category_id=$category_id',
                            locals())
    else:
        where = web.reparam('deleted=false AND category_id=$category_id AND olid=$olid',
                            locals())
    rows = getdb().select('cover',
                          what='*',
                          where=where,
                          order='last_modified desc',
                          offset=offset,
                          limit=limit)
    return rows.list()
def inner(obj2):
    # obj is obj2
    # If we're being called, that means someone's trying to access
    # a lazy reference on obj.
    column = obj.columns[column_name]
    objs = obj._objs
    # First, we need to get the answer for all of us:
    if isinstance(column, Backreference):
        column.target = column._target()
        local_column = column.local_column.sql_name
        target_column = column.target_column
        order = column.order
        plural = column.plural
    else:
        local_column = column.sql_name
        target_column = column.target_column.sql_name
        order = None
        plural = False
    newobjs = {}
    # One batched select fetches the referenced rows for every sibling
    # object, keyed by the target column's value.
    for k in column.target.select(
            order=order,
            where=web.sqlors(target_column + ' = ',
                             [getattr(x, local_column) for x in objs])):
        val = getattr(k, target_column)
        if plural:
            newobjs.setdefault(val, []).append(k)
        else:
            newobjs[val] = k
    # Then we need to add it to all of us:
    # (note: resolved values are cached on the *class*, not the instance)
    for xobj in objs:
        k = getattr(xobj, local_column)
        if k in newobjs:
            setattr(xobj.__class__, column_name, newobjs[k])
    # Finally, we need to return it:
    return newobjs.get(getattr(obj, local_column), [])
def votes_by_caucus(self):
    """Tally this roll call's votes per caucus.

    Returns {caucus_name: {vote_value: count}}, with a None vote key for
    caucus members who did not vote, or None when no caucus member voted
    on this roll at all.
    """
    caucuses = json.load(file('import/load/manual/caucuses.json'))
    members = sum([x['members'] for x in caucuses], [])
    # BUG FIX: ' AND' needs a leading space — web.sqlors() does not wrap a
    # single-element list in parentheses, so the previous 'AND' could fuse
    # with the last quoted value and produce malformed SQL.
    result = db.select(['vote'],
                       where=web.sqlors('politician_id=', members) +
                             ' AND roll_id=' + web.sqlquote(self.id),
                       vars=locals()
                       ).list()
    if not result:
        return None
    votemap = dict((r.politician_id, r.vote) for r in result)
    d = {}
    for c in caucuses:
        cdict = d[c['name']] = {}
        for m in c['members']:
            v = votemap.get(m)
            cdict.setdefault(v, 0)
            cdict[v] += 1
    return d
def fix_type(type, type_id):
    # Migrate the per-type key rows from <prefix>_keys into the global
    # `property` table, rewrite key_id in every datum table accordingly,
    # then drop the property rows that turned out to be unused.
    print >> web.debug, 'fixing type', type
    prefix, multiple_types = get_table_prefix(type)
    keys_table = prefix + "_keys"
    keys = dict((r.key, r.id) for r in db.query('SELECT * FROM ' + keys_table))
    newkeys = {}
    #@@ There is a chance that we may overwrite one update with another when new id is in the same range of old ids.
    #@@ Example:
    #@@     UPDATE datum_str SET key_id=4 FROM thing WHERE thing.id = property.type AND key_id=1;
    #@@     UPDATE datum_str SET key_id=6 FROM thing WHERE thing.id = property.type AND key_id=4;
    #@@ In the above example, the second query overwrites the result of first query.
    #@@ Making id of property table more than max_id of datum_keys makes sure that this case never happen.
    id1 = db.query('SELECT max(id) as x FROM ' + keys_table)[0].x
    id2 = db.query('SELECT max(id) as x FROM property')[0].x
    print >> web.debug, 'max ids', id1, id2
    if id1 > id2:
        # Bump the property id sequence past the old key ids (see @@ above).
        db.query("SELECT setval('property_id_seq', $id1)", vars=locals())
    for key in keys:
        newkeys[key] = db.insert('property', type=type_id, name=key)
    total_updated = {}
    for d in ['str', 'int', 'float', 'boolean', 'ref']:
        table = prefix + '_' + d
        print >> web.debug, 'fixing', type, table
        for key in keys:
            old_key_id = keys[key]
            new_key_id = newkeys[key]
            if multiple_types:
                # Shared datum table: restrict the rewrite to this type's rows.
                updated = db.query('UPDATE %s SET key_id=$new_key_id FROM thing WHERE thing.id = %s.thing_id AND thing.type=$type_id AND key_id=$old_key_id' % (table, table),
                                   vars=locals())
            else:
                updated = db.update(table, key_id=new_key_id,
                                    where='key_id=$old_key_id', vars=locals())
            total_updated[key] = total_updated.get(key, 0) + updated
            print >> web.debug, 'updated', updated
    # Any property that rewrote zero rows across all datum tables is unused.
    unused = [k for k in total_updated if total_updated[k] == 0]
    print >> web.debug, 'unused', unused
    db.delete('property', where=web.sqlors('id =', [newkeys[k] for k in unused]))
def votes_by_caucus(self):
    """Tally this roll call's votes per caucus.

    Returns {caucus_name: {vote_value: count}}, counting non-voting members
    under the None key, or None when no caucus member voted on this roll.
    """
    caucuses = json.load(file("import/load/manual/caucuses.json"))
    members = sum([x["members"] for x in caucuses], [])
    # BUG FIX: the 'AND' keyword needs a leading space — web.sqlors() does
    # not parenthesize a single-element list, so 'AND' could previously fuse
    # with the last quoted value and yield malformed SQL.
    result = db.select(
        ["vote"],
        where=web.sqlors("politician_id=", members)
        + " AND roll_id="
        + web.sqlquote(self.id),
        vars=locals(),
    ).list()
    if not result:
        return None
    votemap = dict((r.politician_id, r.vote) for r in result)
    d = {}
    for c in caucuses:
        cdict = d[c["name"]] = {}
        for m in c["members"]:
            v = votemap.get(m)
            cdict.setdefault(v, 0)
            cdict[v] += 1
    return d
def load_into_db(pname, distname, electionresults, recent_election_year):
    #load the details of the winner in recent election results into `politician` table
    #and all the details of elections in district `distname` into the `past_elections` table
    with db.transaction():
        for r in electionresults:
            # "Jane Doe (D)" -> "jane doe"
            candidate_id = r.candidate.split('(')[0].strip().lower()
            if r.year == recent_election_year and r.type == 'Gen' and pname in candidate_id:
                # This row is the sitting politician's most recent general
                # election: record the result on the politician row.
                polid = db.update('politician',
                                  where="district_id=$distname AND %s" % (web.sqlors('lastname ilike ', pname.split(' '))),
                                  n_vote_received=r.votes,
                                  pct_vote_received=r.vote_pct,
                                  last_elected_year=r.year,
                                  vars=locals())
            candidate_id = candidate_id.replace(' ', '_')
            # Insert each election row once (idempotent on re-runs).
            if not db.select('past_elections',
                             where='politician_id=$candidate_id and district_id=$distname '
                                   'and year=$r.year and type=$r.type',
                             vars=locals()):
                db.insert('past_elections', seqname=False,
                          politician_id=candidate_id,
                          district_id=distname,
                          votes_received=r.votes,
                          pct_votes_received=r.vote_pct,
                          type=r.type,
                          year=r.year,
                          expenditure=r.expenditure)
def delete(self, domains=None, keep_mailbox_days=0):
    """Delete LDAP mail domains, logging mailboxes for delayed removal.

    keep_mailbox_days=0 means "keep forever" (stored as 36500 days).
    Returns (True, ) on success or (False, reason) on failure.
    """
    if not domains:
        return (False, 'INVALID_DOMAIN_NAME')
    domains = [str(v).lower() for v in domains if iredutils.is_domain(v)]
    if not domains:
        return (True, )
    msg = {}  # domain -> LDAP error string
    for domain in domains:
        dn = ldaputils.convert_keyword_to_dn(web.safestr(domain), accountType='domain')
        if dn[0] is False:
            # convert_keyword_to_dn returned an error tuple; propagate it.
            return dn
        # Log maildir path in SQL table (best effort; failures are ignored).
        try:
            qr = self.conn.search_s(attrs.DN_BETWEEN_USER_AND_DOMAIN + dn,
                                    ldap.SCOPE_ONELEVEL,
                                    "(objectClass=mailUser)",
                                    ['mail', 'homeDirectory'])
            if keep_mailbox_days == 0:
                keep_mailbox_days = 36500
            # Convert keep days to a 'YYYY-MM-DD' removal date in the future.
            _now_in_seconds = time.time()
            _days_in_seconds = _now_in_seconds + (keep_mailbox_days * 24 * 60 * 60)
            sql_keep_days = time.strftime(
                '%Y-%m-%d', time.strptime(time.ctime(_days_in_seconds)))
            v = []
            for obj in qr:
                deleted_mail = obj[1].get('mail')[0]
                deleted_maildir = obj[1].get('homeDirectory', [''])[0]
                v += [{
                    'maildir': deleted_maildir,
                    'username': deleted_mail,
                    'domain': domain,
                    'admin': session.get('username'),
                    'delete_date': sql_keep_days
                }]
            if v:
                web.admindb.multiple_insert('deleted_mailboxes', values=v)
        except:
            pass
        try:
            connUtils.delete_ldap_tree(dn=dn, conn=self.conn)
            web.logger(
                msg="Delete domain: %s." % (domain),
                domain=domain,
                event='delete',
            )
        except ldap.LDAPError as e:
            msg[domain] = str(e)
    # Delete real-time mailbox quota (best effort; failures are ignored).
    try:
        web.admindb.query(
            'DELETE FROM %s WHERE %s'
            % (settings.SQL_TBL_USED_QUOTA,
               web.sqlors('username LIKE ', ['%@' + d for d in domains])))
    except:
        pass
    if msg == {}:
        return (True, )
    else:
        return (False, ldaputils.getExceptionDesc(msg))
for domain in domains: dn = ldaputils.convert_keyword_to_dn(web.safestr(domain), accountType='domain') if dn[0] is False: return dn try: deltree.DelTree(self.conn, dn, ldap.SCOPE_SUBTREE) web.logger(msg="Delete domain: %s." % (domain), domain=domain, event='delete',) except ldap.LDAPError, e: msg[domain] = str(e) # Delete real-time mailbox quota. try: web.admindb.query( 'DELETE FROM used_quota WHERE %s' % \ web.sqlors('username LIKE ', ['%@' + d for d in domains]) ) except: pass if msg == {}: return (True,) else: return (False, ldaputils.getExceptionDesc(msg)) @decorators.require_global_admin def enableOrDisableAccount(self, domains, action, attr='accountStatus',): if domains is None or len(domains) == 0: return (False, 'NO_DOMAIN_SELECTED') result = {}
] forwarding_addresses_not_in_domain = [ addr for addr in mailForwardingAddresses if not addr.endswith('@' + self.domain) ] # Re-generate list of forwarding addresses, remove non-exist mail users in same domain. # Get `dbmail_users.user_idnr` of mail users. if len(forwarding_addresses_in_domain) > 0: qr = self.conn.select( 'dbmail_users', vars={ 'domain': self.domain, }, what='userid', where='domain = $domain AND %s' % (web.sqlors( 'userid = ', forwarding_addresses_in_domain), ), ) if qr: forwarding_addresses_in_domain = [ str(rcd.userid).lower() for rcd in qr ] if 'savecopy' in data.keys(): forwarding_addresses_in_domain += [self.user_idnr] else: # Save address=goto to keep catch-all working. forwarding_addresses_in_domain = [self.user_idnr] if forwarding_addresses_in_domain or forwarding_addresses_not_in_domain: sql_values = [{
try: deltree.DelTree(self.conn, dn, ldap.SCOPE_SUBTREE) web.logger( msg="Delete domain: %s." % (domain), domain=domain, event='delete', ) except ldap.LDAPError, e: msg[domain] = str(e) # Delete real-time mailbox quota. try: web.admindb.query( 'DELETE FROM used_quota WHERE %s' % \ web.sqlors('username LIKE ', ['%@' + d for d in domains]) ) except: pass if msg == {}: return (True, ) else: return (False, ldaputils.getExceptionDesc(msg)) @decorators.require_global_admin def enableOrDisableAccount( self, domains, action, attr='accountStatus',
def get_metadata_list(self, keys):
    """Return {key: thing-row} for each of `keys` belonging to this site."""
    where = web.reparam("site_id=$self.site_id", locals()) + web.sqlors("key=", keys)
    result = self.db.select("thing", what="*", where=where).list()
    # Keys with no matching row are simply absent from the result dict.
    d = dict((r.key, r) for r in result)
    return d
def get_metadata_list(self, keys):
    """Return a mapping of key -> thing row for each key found in `keys`."""
    rows = self.db.select('thing',
                          what='*',
                          where=web.sqlors('key=', keys)).list()
    return dict((row.key, row) for row in rows)
def update(self, profile_type, mail, data,):
    """Update the profile of a dbmail alias account.

    :param profile_type: which profile section is being updated.
    :param mail: the alias email address being updated.
    :param data: dict of submitted web-form values.

    Returns (True,) on success, or (False, <reason>) on failure.
    NOTE(review): assumes `data` always carries the
    'newMailForwardingAddresses' key — a missing key would raise
    AttributeError on `.splitlines()`; confirm against the caller.
    """
    self.profile_type = str(profile_type)
    self.mail = str(mail)
    # Domain part of the alias address.
    self.domain = self.mail.split('@', 1)[-1]

    if not iredutils.isEmail(self.mail) or not iredutils.isDomain(self.domain):
        return (False, 'INVALID_MAIL')

    # Pre-defined.
    sql_vars = {'mail': self.mail, 'domain': self.domain, }
    values = {}

    # Get cn.
    self.cn = data.get('cn', '')
    values['name'] = self.cn

    # Get accountStatus.
    #self.status = 0 # Disabled.
    #if 'accountStatus' in data.keys():
    #    self.status = 1 # Enabled.
    #values['active'] = self.status

    # Get access policy.
    #self.accessPolicy = str(data.get('accessPolicy', ''))
    #if self.accessPolicy in settings.SQL_ALIAS_ACCESS_POLICIES:
    #    values['accesspolicy'] = self.accessPolicy

    # Get members & moderators from web form.
    # Keep only syntactically valid addresses, lower-cased.
    self.mailForwardingAddresses = [
        str(v).lower()
        for v in data.get('mailForwardingAddress', [])
        if iredutils.isEmail(str(v))
    ]
    #self.moderators = [
    #    str(v).lower()
    #    for v in data.get('moderators', [])
    #    if iredutils.isEmail(str(v))
    #]

    # Get mail forwarding addresses & moderators from form.
    # Textarea input: one address per line.
    self.newMailForwardingAddresses = set(
        str(v).strip().lower()
        for v in data.get('newMailForwardingAddresses').splitlines()
        if iredutils.isEmail(str(v).strip())
    )
    #self.newModerators = set(
    #    str(v).strip().lower()
    #    for v in data.get('newModerators').splitlines()
    #    if iredutils.isEmail(str(v).strip())
    #)

    # Get union set of old/new alias members.
    self.allMembers = set(self.mailForwardingAddresses) | self.newMailForwardingAddresses
    #self.allModerators = set(self.moderators) | self.newModerators

    # Remove non-exist accounts in same domain.
    # Get alias members & moderators which in same domain.
    self.membersInDomain = [v for v in self.allMembers if v.endswith('@' + self.domain)]
    self.membersNotInDomain = [v for v in self.allMembers if not v.endswith('@' + self.domain)]
    #self.moderatorsInDomain = [v for v in self.allModerators if v.endswith('@' + self.domain)]
    #self.moderatorsNotInDomain = [v for v in self.allModerators if not v.endswith('@' + self.domain)]

    # Re-generate list of alias members, remove non-exist members.
    if len(self.membersInDomain) > 0:
        qr = self.conn.select(
            'dbmail_users',
            vars=sql_vars,
            what='userid',
            where='domain = $domain AND %s' % (web.sqlors('userid = ', self.membersInDomain)),
        )
        # Only addresses present in dbmail_users survive.
        self.membersInDomain = [str(rcd.userid) for rcd in qr]

    # Get alias moderators.
    """
    if len(self.moderatorsInDomain) > 0:
        qr = self.conn.select(
            'dbmail_users',
            what='userid',
            where='domain = %s AND ' % (
                web.sqlquote(self.domain),
                web.sqlors('userid = ', self.moderatorsInDomain),
            ),
        )
        self.moderatorsInDomain = []
        for i in qr:
            self.moderatorsInDomain += [str(i.userid)]

    values['moderators'] = ','.join(self.moderatorsInDomain + self.moderatorsNotInDomain)
    """

    try:
        # Update extra alias attributes (display name etc.).
        self.conn.update(
            'dbmail_aliases_extra',
            vars=sql_vars,
            where='alias=$mail',
            **values
        )

        # Replace all existing forwarding records for this alias.
        self.conn.delete('dbmail_aliases',
                         vars=sql_vars,
                         where='alias = $mail',
                        )

        if self.membersInDomain or self.membersNotInDomain:
            sql_values = [{'alias': self.mail,
                           'deliver_to': member,
                          } for member in self.membersInDomain + self.membersNotInDomain
                         ]
            self.conn.multiple_insert('dbmail_aliases', sql_values)

        return (True,)
    except Exception, e:
        return (False, str(e))
def representatives(self):
    """Politicians whose current district is this one."""
    current = db.select("curr_politician",
                        where="district_id = $self.name",
                        vars=locals())
    ids = [row.id for row in current]
    return Politician.select(where=web.sqlors("id=", ids))
def get_blacklisted_authors_sql():
    """Build an SQL clause matching any blacklisted author.

    Reads data/blacklist-authors.dat (one author per line) and returns a
    web.sqlors fragment over the stripped names.

    Fix: the original opened the file without ever closing it; a `with`
    block now closes the handle deterministically.  The 'U' (universal
    newlines) mode flag is kept for behavior parity with this Python 2
    codebase.
    """
    with open('data/blacklist-authors.dat', 'U') as f:
        authors = [line.strip() for line in f]
    return web.sqlors('author=', authors)
def get_metadata_list_from_ids(self, ids):
    """Return a dict mapping thing id -> row for every id found in `thing`.

    Fix: dropped the dead `d = {}` initialization the original
    immediately overwrote.
    """
    rows = self.db.select('thing',
                          what='*',
                          where=web.sqlors('id=', ids)).list()
    return dict((r.id, r) for r in rows)
def delete(self, domains=None, keep_mailbox_days=0):
    """Delete SQL domains and all of their related records.

    :param domains: list of domain names to delete.
    :param keep_mailbox_days: days to keep mailboxes on disk; 0 means
        "forever" (stored as 36500 days).

    Returns (True,) on success or (False, <error string>) on failure.

    Fixes over the original: the bare `except:` around the used_quota
    cleanup is narrowed to `except Exception:`, and `except X, e` syntax
    is replaced with `except X as e` (valid on Python 2.6+ and 3).
    """
    if not domains:
        return (False, 'INVALID_DOMAIN_NAME')

    # Normalize and keep only syntactically valid domain names.
    domains = [str(v).lower() for v in domains if iredutils.is_domain(v)]
    if not domains:
        return (True, )

    if keep_mailbox_days == 0:
        # Keep mailboxes (almost) forever.
        keep_mailbox_days = 36500

    sql_vars = {'domains': domains,
                'admin': session.get('username'),
                'keep_mailbox_days': keep_mailbox_days}

    # Log maildir paths of existing users so mailboxes can be purged later.
    try:
        sql_raw = '''
INSERT INTO deleted_mailboxes (username, maildir, domain, admin, delete_date)
SELECT username, \
       CONCAT(storagebasedirectory, '/', storagenode, '/', maildir) AS maildir, \
       domain, \
       $admin, \
       DATE_ADD(NOW(), INTERVAL $keep_mailbox_days DAY)
  FROM mailbox
 WHERE domain IN $domains'''
        self.conn.query(sql_raw, vars=sql_vars)
    except Exception:
        # Best-effort bookkeeping; domain deletion proceeds regardless.
        pass

    # Delete domain and related records.
    try:
        self.conn.delete('domain',
                         vars=sql_vars,
                         where='domain IN $domains',
                         )

        self.conn.delete(
            'alias_domain',
            vars=sql_vars,
            where='alias_domain IN $domains OR target_domain IN $domains',
        )

        for tbl in ['alias', 'moderators', 'forwardings', 'domain_admins',
                    'mailbox',
                    'recipient_bcc_domain', 'recipient_bcc_user',
                    'sender_bcc_domain', 'sender_bcc_user']:
            self.conn.delete(tbl, vars=sql_vars, where='domain IN $domains')

        # Delete real-time mailbox quota.
        try:
            self.conn.query('DELETE FROM %s WHERE %s' % (
                settings.SQL_TBL_USED_QUOTA,
                web.sqlors('username LIKE ', ['%@' + d for d in domains])))
        except Exception:
            # Quota table may be absent; keep the original best-effort
            # behavior, but no longer swallow KeyboardInterrupt/SystemExit.
            pass

        for d in domains:
            web.logger(msg="Delete domain: %s." % (d), domain=d, event='delete',)

        return (True,)
    except Exception as e:
        return (False, str(e))
def get_metadata_list(self, keys):
    """Look up `thing` rows by key, restricted to this site; keyed by key."""
    where_clause = web.reparam('site_id=$self.site_id', locals()) \
        + web.sqlors('key=', keys)
    rows = self.db.select('thing', what='*', where=where_clause).list()
    out = {}
    for r in rows:
        out[r.key] = r
    return out
def GET(self, format=None):
    """Resolve a search query (zip, zip+4, district code, or name) to a
    district or politician page, or render the full district list.

    Fix: the single-politician branch called `web.seeother(...)` without
    `raise`, so the redirect never fired and the handler fell through
    returning None; every other redirect in this function raises, and now
    this one does too.
    """
    i = web.input(address=None)
    pzip5 = re.compile(r'\d{5}')
    pzip4 = re.compile(r'\d{5}-\d{4}')
    pdist = re.compile(r'[a-zA-Z]{2}\-\d{2}')
    dists = None

    # Accept the legacy `zip` parameter as a fallback for `q`.
    if not i.get('q'):
        i.q = i.get('zip')

    if i.q:
        # ZIP+4 maps directly to districts via the zip4 table.
        if pzip4.match(i.q):
            zip, plus4 = i.q.split('-')
            dists = [
                x.district_id
                for x in db.select('zip4',
                                   where='zip=$zip and plus4=$plus4',
                                   vars=locals())
            ]
        # Plain 5-digit zip may span districts; an address narrows it down.
        elif pzip5.match(i.q):
            try:
                dists = zip2rep.zip2dist(i.q, i.address)
            except zip2rep.BadAddress:
                return render.find_badaddr(i.q, i.address)

        if dists:
            d_dists = list(
                schema.District.select(where=web.sqlors('name=', dists)))
            out = apipublish.publish(d_dists, format)
            if out:
                return out
            if len(dists) == 1:
                raise web.seeother('/us/%s' % dists[0].lower())
            elif len(dists) == 0:
                return render.find_none(i.q)
            else:
                return render.find_multi(i.q, d_dists)

        # Direct district code, e.g. "CA-12".
        if pdist.match(i.q):
            raise web.seeother('/us/%s' % i.q)

        # Otherwise treat the query as a politician search.
        results = se.query(i.q)
        reps = schema.Politician.select(where=web.sqlors('id=', results))
        if len(reps) > 1:
            return render.find_multi_reps(reps, congress_ranges)
        else:
            try:
                rep = reps[0]
                # BUG FIX: must raise for the redirect to take effect.
                raise web.seeother('/p/%s' % rep.id)
            except IndexError:
                raise web.notfound()
    else:
        # No query: list every district with its current politician(s).
        index = list(schema.District.select(order='name asc'))
        for i in index:
            i.politician = list(
                db.select('curr_politician',
                          where='district_id = $i.name',
                          vars=locals()))
        out = apipublish.publish(index, format)
        if out:
            return out
        return render.districtlist(index)
def things(self, query):
    """Execute a things query and return the list of matching thing keys.

    Translates the structured `query` into one SQL select over the `thing`
    table plus one aliased property table per queried key (d0, d1, ...),
    joined on the thing id.  An empty result is signalled internally by
    raising StopIteration from the helpers and caught below.
    """
    type = query.get_type()
    if type:
        type_metedata = self.get_metadata(type)
        if type_metedata:
            type_id = type_metedata.id
        else:
            # Return empty result when type not found
            return []
    else:
        type_id = None

    # type is required if there are conditions/sort on keys other than
    # [key, type, created, last_modified]
    common_properties = ['key', 'type', 'created', 'last_modified']
    _sort = query.sort and query.sort.key
    if _sort and _sort.startswith('-'):
        _sort = _sort[1:]
    type_required = bool([c for c in query.conditions if c.key not in common_properties]) or (_sort and _sort not in common_properties)

    if type_required and type is None:
        raise common.BadData("Type Required")

    class DBTable:
        # Lightweight (name, alias) pair for tables referenced by the query.
        def __init__(self, name, label=None):
            self.name = name
            self.label = label or name

        def sql(self):
            # SQL fragment for the FROM clause ("name as label" when aliased).
            if self.label != self.name:
                return "%s as %s" % (self.name, self.label)
            else:
                return self.name

        def __repr__(self):
            return self.label

    class Literal:
        # Wraps a raw SQL fragment (e.g. an operator) so repr() emits it verbatim.
        def __init__(self, value):
            self.value = value

        def __repr__(self):
            return self.value

    tables = {}

    def get_table(datatype, key):
        # One aliased datatype table per queried key: d0, d1, ...
        if key not in tables:
            assert type is not None, "Missing type"
            table = self.schema.find_table(type, datatype, key)
            label = 'd%d' % len(tables)
            tables[key] = DBTable(table, label)
        return tables[key]

    wheres = []

    def process(c, ordering_func=None):
        # Append WHERE fragments for one condition `c`.
        # ordering_func is used when the query contains embeddable objects
        #
        # example: {'links': {'title: 'foo', 'url': 'http://example.com/foo'}}
        if c.datatype == 'ref':
            metadata = self.get_metadata(c.value)
            if metadata is None:
                # required object is not found so the query result will be empty.
                # Raise StopIteration to indicate empty result.
                raise StopIteration
            c.value = metadata.id
        if c.op == '~':
            # '~' is the wildcard-match operator; '*' becomes SQL '%'.
            op = Literal('LIKE')
            c.value = c.value.replace('*', '%')
        else:
            op = Literal(c.op)

        if c.key in ['key', 'type', 'created', 'last_modified']:
            #@@ special optimization to avoid join with thing.type when there are non-common properties in the query.
            #@@ Since type information is already present in property table,
            #@@ getting property id is equivalent to join with type.
            if c.key == 'type' and type_required:
                return

            if isinstance(c.value, list):
                q = web.sqlors('thing.%s %s ' % (c.key, op), c.value)
            else:
                q = web.reparam('thing.%s %s $c.value' % (c.key, op), locals())

            xwheres = [q]

            # Add thing table explicitly because get_table is not called
            tables['_thing'] = DBTable("thing")
        else:
            table = get_table(c.datatype, c.key)
            key_id = self.get_property_id(type_id, c.key)
            if not key_id:
                # Unknown property for this type -> no rows can match.
                raise StopIteration

            q1 = web.reparam('%(table)s.key_id=$key_id' % {'table': table}, locals())
            if isinstance(c.value, list):
                q2 = web.sqlors('%s.value %s ' % (table, op), c.value)
            else:
                q2 = web.reparam('%s.value %s $c.value' % (table, op), locals())
            xwheres = [q1, q2]
            if ordering_func:
                xwheres.append(ordering_func(table))
        wheres.extend(xwheres)

    def make_ordering_func():
        # All property rows of one embedded object share the same `ordering`
        # value; d.table memoizes the first table seen so later tables are
        # constrained to match its ordering.
        d = web.storage(table=None)
        def f(table):
            d.table = d.table or table
            return '%s.ordering = %s.ordering' % (table, d.table)
        return f

    import readquery
    def process_query(q, ordering_func=None):
        # Recurse into sub-queries (embedded objects); leaves go to process().
        for c in q.conditions:
            if isinstance(c, readquery.Query):
                process_query(c, ordering_func or make_ordering_func())
            else:
                process(c, ordering_func)

    def process_sort(query):
        """Process sort field in the query and returns the db column to order by."""
        if query.sort:
            sort_key = query.sort.key
            if sort_key.startswith('-'):
                # Leading '-' means descending sort.
                ascending = " desc"
                sort_key = sort_key[1:]
            else:
                ascending = ""

            if sort_key in ['key', 'type', 'created', 'last_modified']:
                order = 'thing.' + sort_key  # make sure c.key is valid
                # Add thing table explicitly because get_table is not called
                tables['_thing'] = DBTable("thing")
            else:
                table = get_table(query.sort.datatype, sort_key)
                key_id = self.get_property_id(type_id, sort_key)
                if key_id is None:
                    raise StopIteration
                q = '%(table)s.key_id=$key_id' % {'table': table}
                wheres.append(web.reparam(q, locals()))
                order = table.label + '.value'
            return order + ascending
        else:
            return None

    try:
        process_query(query)
        # special care for case where query {}.
        if not tables:
            tables['_thing'] = DBTable('thing')
        order = process_sort(query)
    except StopIteration:
        # A helper determined the result must be empty.
        return []

    def add_joins():
        # Join every participating table on the thing id.
        labels = [t.label for t in tables.values()]
        def get_column(table):
            if table == 'thing':
                return 'thing.id'
            else:
                return table + '.thing_id'

        if len(labels) > 1:
            x = labels[0]
            xwheres = [get_column(x) + ' = ' + get_column(y) for y in labels[1:]]
            wheres.extend(xwheres)

    add_joins()
    wheres = wheres or ['1 = 1']

    table_names = [t.sql() for t in tables.values()]

    t = self.db.transaction()
    if config.query_timeout:
        self.db.query("SELECT set_config('statement_timeout', $query_timeout, false)", dict(query_timeout=config.query_timeout))

    if 'thing' in table_names:
        # thing table participates directly: select keys in one pass.
        result = self.db.select(
            what='thing.key',
            tables=table_names,
            where=self.sqljoin(wheres, ' AND '),
            order=order,
            limit=query.limit,
            offset=query.offset,
        )
    else:
        # Only property tables involved: fetch ids first, then their keys.
        result = self.db.select(
            what='d0.thing_id',
            tables=table_names,
            where=self.sqljoin(wheres, ' AND '),
            order=order,
            limit=query.limit,
            offset=query.offset,
        )
        result = self.db.query('SELECT key FROM thing where ' + web.sqlors('id = ', [r.thing_id for r in result]))

    result = [r.key for r in result]
    t.commit()
    return result
def GET(self, format=None):
    """Resolve `zip` input (zip+4, zip5, district code, or politician name)
    to a district/politician page, or list all districts.

    Fixes over the original:
      - the single-politician branch called `web.seeother(...)` without
        `raise`, so the redirect never fired;
      - `raise web.notfound` raised the callable itself instead of the
        response it builds; it is now called as `web.notfound()`,
        consistent with the newer variant of this handler.
    """
    i = web.input(address=None)
    join = ['district' + ' LEFT OUTER JOIN politician '
            'ON (politician.district = district.name)']
    pzip5 = re.compile(r'\d{5}')
    pzip4 = re.compile(r'\d{5}-\d{4}')
    pname = re.compile(r'[a-zA-Z\.]+')
    pdist = re.compile(r'[a-zA-Z]{2}\-\d{2}')

    # Declarative description of the district fields exposed via the API.
    dist_struct = {
        'uri': apipublish.generic(lambda x: 'http://watchdog.net/us/' + x.name.lower()),
        'type': 'District',
        'name state district voting': apipublish.identity,
        'wikipedia': apipublish.URI,
    }

    if i.get('zip'):
        # ZIP+4 maps directly to districts via the zip4 table.
        if pzip4.match(i.zip):
            zip, plus4 = i.zip.split('-')
            dists = [x.district for x in
                     db.select('zip4',
                               where='zip=$zip and plus4=$plus4',
                               vars=locals())]
            d_dists = db.select('district', where=web.sqlors('name=', dists))
            out = apipublish.publish(dist_struct, d_dists, format)
            if out is not False:
                return out
            if len(dists) == 0:
                return render.find_none(i.zip)
            else:
                #@@ verify there aren't dupe districts
                raise web.seeother('/us/%s' % dists[0].lower())

        # Plain 5-digit zip may span districts; an address narrows it down.
        if pzip5.match(i.zip):
            try:
                dists = zip2rep.zip2dist(i.zip, i.address)
            except zip2rep.BadAddress:
                return render.find_badaddr(i.zip, i.address)
            d_dists = db.select('district', where=web.sqlors('name=', dists))
            out = apipublish.publish(dist_struct, d_dists, format)
            if out is not False:
                return out
            if len(dists) == 1:
                raise web.seeother('/us/%s' % dists[0].lower())
            elif len(dists) == 0:
                return render.find_none(i.zip)
            else:
                dists = db.select(join, where=web.sqlors('name=', dists))
                return render.find_multi(i.zip, dists)

        # Direct district code, e.g. "CA-12".
        if pdist.match(i.zip):
            raise web.seeother('/us/%s' % i.zip)

        # Otherwise treat the input as a politician name.
        if pname.match(i.zip):
            in_name = i.zip.lower()
            name = in_name.replace(' ', '_')
            vars = {'name': '%%%s%%' % name}
            reps = db.select('politician', where="id like $name", vars=vars)
            if len(reps) == 0:
                # Fall back to a fuzzy match on the display name.
                vars = {'name': '%%%s%%' % in_name}
                reps = db.select('v_politician_name', where="name ilike $name", vars=vars)
            if len(reps) > 1:
                return render.find_multi_reps(reps)
            else:
                try:
                    rep = reps[0]
                    # BUG FIX: must raise for the redirect to take effect.
                    raise web.seeother('/p/%s' % rep.id)
                except IndexError:
                    # BUG FIX: notfound must be called, not raised bare.
                    raise web.notfound()
    else:
        # No zip given: publish or render the full district list.
        out = apipublish.publish(dist_struct, db.select('district'), format)
        if out is not False:
            return out
        dists = db.select(join, order='name asc')
        return render.districtlist(dists)
def query_census(location, hr_keys):
    """Sum census values for the given hr_keys in one district.

    DISTINCT is used because some hr_keys map to multiple internal keys
    that should carry the same value.
    Returns None when the query yields no rows.
    """
    sql = ('SELECT SUM(value) FROM (SELECT DISTINCT value, hr_key FROM census WHERE '
           + web.sqlors('hr_key=', hr_keys)
           + ' AND district_id='
           + web.sqlquote(location)
           + ') AS foo;')
    rows = db.query(sql)
    if not rows:
        return None
    return rows[0].sum
def representatives(self):
    """Return the Politician records currently holding this district."""
    pol_ids = []
    for rec in db.select('curr_politician',
                         where="district_id = $self.name",
                         vars=locals()):
        pol_ids.append(rec.id)
    return Politician.select(where=web.sqlors('id=', pol_ids))
def get_blacklisted_authors_sql():
    """Return a web.sqlors clause matching blacklisted authors.

    Fix: the original left data/blacklist-authors.dat open; a `with`
    block now guarantees the handle is closed.  The 'U' (universal
    newlines) flag is preserved for behavior parity with this Python 2
    codebase.
    """
    with open('data/blacklist-authors.dat', 'U') as blacklist:
        names = [line.strip() for line in blacklist]
    return web.sqlors('author=', names)