def get_user_timesheet(cls, *, user=None, week_ending_date=None):
    """Retrieve timesheet entries for a user for the week ending on
    *week_ending_date*.

    Defaults to the current user and the current week-ending date when
    the keyword arguments are omitted.
    """
    user = current_user if user is None else user
    week_ending_date = (current_week_ending_date()
                        if week_ending_date is None else week_ending_date)
    # Generate the seven dates of the week via a CTE, then LEFT JOIN the
    # entries so days without an entry still appear in the result.
    query = RawQuery(
        cls,
        """
        WITH daynums(num) AS (VALUES (6),(5),(4),(3),(2),(1),(0)),
        week(day) AS (SELECT date(?, '-'||num||' day') FROM daynums)
        SELECT id, day as date, finished_at, started_at, user_id,
               modified_at, break_for_id, is_approved, approver_id,
               approved_at, comment
        FROM week
        LEFT JOIN entry ON "date" = day AND user_id = ?
        ORDER BY "date" ASC""",
        week_ending_date.isoformat(),
        user.id)
    return query.execute()
def available_hash_list(cls):
    """Compute an index hash for every row of this model's table.

    Returns a tuple ``(hash_list, hash_dict)`` where ``hash_list`` is the
    ordered list of row hashes and ``hash_dict`` maps each hash to its
    primary-key column values and the hash itself.
    """
    primary_keys = cls.get_primary_keys()  # pylint: disable=no-member
    table_name = cls._meta.db_table  # pylint: disable=no-member
    # Map each key to its actual database column (relations may rename it).
    column_map = {}
    for key in primary_keys:
        # pylint: disable=no-member
        if len(cls._meta.rel) > 0:
            column_map[key] = cls._meta.rel.get(key).db_column
        else:
            column_map[key] = key
        # pylint: enable=no-member

    sql = "SELECT {0} FROM {1}".format(",".join(column_map.values()), table_name)
    key_rows = RawQuery(cls, sql, None).dicts()

    hash_list = []
    hash_dict = {}
    for row in key_rows.execute():
        row_hash = index_hash(*row.values())
        hash_list.append(row_hash)
        hash_dict[row_hash] = {
            'key_list': {column_map.get(key): row[key] for key in column_map},
            'index_hash': row_hash,
        }
    return hash_list, hash_dict
def get_latest_change_date(playlist_number):
    """Return the most recent add/remove timestamp for a playlist.

    Returns None implicitly when the playlist has no rows.
    """
    sql = ("SELECT GREATEST(IFNULL(t.c1,0),IFNULL(t.c2,0)) dateAdded "
           "FROM (SELECT MAX(dateAdded) c1, MAX(dateRemoved) c2 "
           "FROM TracksInPlaylist WHERE playlistNumber=%s) AS t")
    query = RawQuery(Tracksinplaylist, sql, playlist_number)
    # The aggregate query yields a single row; return its value directly.
    for row in query.execute():
        return row.dateAdded
def fetch_graph_full(genre_ids, color='red', max_position=100):
    """Return full graph w/o single nodes.

    Builds (nodes, edges) from the similarity table restricted to artists
    belonging to *genre_ids*; every node carries a label, color and size.
    """
    genre_list = ','.join(str(genre_id) for genre_id in genre_ids)
    sql = ('SELECT from_id, a1.name as from_label, to_id, a2.name as to_label, '
           'a1.degree_input as from_degree, a2.degree_input as to_degree '
           'FROM "similar" '
           'JOIN "artist" a1 ON (from_id = a1.id) '
           'JOIN "artist" a2 ON (to_id = a2.id) '
           'WHERE position < %d AND '
           'from_id IN (SELECT DISTINCT artist_id FROM artist_genre WHERE genre_id IN (%s)) '
           'AND to_id IN (SELECT DISTINCT artist_id FROM artist_genre WHERE genre_id IN (%s)) '
           'LIMIT %d' % (max_position, genre_list, genre_list, EXPORT_LIMIT))
    query = RawQuery(Similar, sql)

    nodes, edges = {}, []
    for row in query.execute():
        nodes[row.from_id] = {
            'label': row.from_label,
            'color': color,
            'size': row.from_degree,
        }
        nodes[row.to_id] = {
            'label': row.to_label,
            'color': color,
            'size': row.to_degree,
        }
        edges.append((row.from_id, row.to_id))
    return nodes, edges
def get_next_queue_item(cls, accounts, types, action_model, order_by):
    """Get the next order available to like, follow, retweet, etc...

    Returns an item that the accounts have not previously encountered,
    that wasn't created by one of the accounts, and that is one of the
    supported types.

    Raises:
        cls.DoesNotExist: when no matching order is available.
    """
    # TODO: convert to peewee when peewee supports joins on
    # sub-selects
    # see: https://groups.google.com/forum/#!topic/peewee-orm/Lm2qYpYo88k
    # TODO: also use service
    params = {
        'action_table': action_model._meta.db_table,
        'order_table': cls._meta.db_table,
        'account_ids': ','.join(str(account.id) for account in accounts),
        # Renamed the loop variable: the original shadowed the builtin `type`.
        'type_ids': ','.join(str(type_id) for type_id in types),
        'order_by': order_by,
    }
    # Get the latest order neither created by nor already seen by an account:
    # the LEFT JOIN pulls matching actions, and `b.id IS NULL` keeps only
    # orders with no such action (anti-join).
    sql = """
        SELECT `%(order_table)s`.*
        FROM `%(order_table)s`
        LEFT JOIN (SELECT * FROM `%(action_table)s`
                   WHERE `%(action_table)s`.account_id IN (%(account_ids)s)) AS b
        ON b.order_id = `%(order_table)s`.id
        WHERE b.id IS NULL
        AND `%(order_table)s`.account_id NOT IN (%(account_ids)s)
        AND `%(order_table)s`.status = 0
        AND `%(order_table)s`.type IN (%(type_ids)s)
        ORDER BY %(order_by)s
        LIMIT 1
    """ % params
    logging.debug(sql)
    rq = RawQuery(cls, sql)
    orders = list(rq.execute())  # idiomatic list(), not a copy comprehension
    if not orders:
        raise cls.DoesNotExist()
    return orders[0]
def fetch_graph_custom(rock_ids, metal_ids, max_position=100, primary=True):
    """Build a combined rock/metal similarity graph.

    Each node is colored by whether the artist belongs to the rock genres,
    the metal genres, both, or neither. Returns ``(nodes, edges)``.
    """
    rock_in = ','.join([str(i) for i in rock_ids])
    metal_in = ','.join([str(i) for i in metal_ids])
    all_in_ = ','.join([rock_in, metal_in])
    primary_condition = 'a1.is_primary = True AND a2.is_primary = True AND' if primary else ''
    rq = RawQuery(
        Similar,
        'SELECT from_id, a1.name as from_label, to_id, a2.name as to_label, '
        'a1.degree_input as from_degree, a2.degree_input as to_degree, '
        'CASE WHEN (SELECT COUNT(*) FROM artist_genre '
        'WHERE genre_id IN (%s) AND artist_id = from_id) > 0 '
        'THEN 1 ELSE 0 END AS from_is_rock, '
        'CASE WHEN (SELECT COUNT(*) FROM artist_genre '
        'WHERE genre_id IN (%s) AND artist_id = from_id) > 0 '
        'THEN 1 ELSE 0 END AS from_is_metal, '
        'CASE WHEN (SELECT COUNT(*) FROM artist_genre '
        'WHERE genre_id IN (%s) AND artist_id = to_id) > 0 '
        'THEN 1 ELSE 0 END AS to_is_rock, '
        'CASE WHEN (SELECT COUNT(*) FROM artist_genre '
        'WHERE genre_id IN (%s) AND artist_id = to_id) > 0 '
        'THEN 1 ELSE 0 END AS to_is_metal '
        'FROM "similar" '
        'JOIN "artist" a1 ON (from_id = a1.id) '
        'JOIN "artist" a2 ON (to_id = a2.id) '
        # BUG FIX: the original literal had no trailing space, so the
        # concatenated SQL read "position < 100AND from_id ..." — invalid SQL.
        'WHERE %s position < %d '
        'AND from_id IN (SELECT DISTINCT artist_id FROM artist_genre WHERE genre_id IN (%s)) '
        'AND to_id IN (SELECT DISTINCT artist_id FROM artist_genre WHERE genre_id IN (%s)) '
        'LIMIT %d' % (rock_in, metal_in, rock_in, metal_in, primary_condition,
                      max_position, all_in_, all_in_, EXPORT_LIMIT))

    def select_color(is_rock, is_metal):
        # Pick the node color from the two genre-membership flags.
        if is_rock and is_metal:
            return COLOR_BOTH
        elif is_rock:
            return COLOR_ROCK
        elif is_metal:
            return COLOR_METAL
        else:
            return 'grey'

    nodes = {}
    edges = []
    for obj in rq.execute():
        nodes[obj.from_id] = {
            'label': obj.from_label,
            'color': select_color(obj.from_is_rock, obj.from_is_metal),
            'size': obj.from_degree
        }
        nodes[obj.to_id] = {
            'label': obj.to_label,
            'color': select_color(obj.to_is_rock, obj.to_is_metal),
            'size': obj.to_degree
        }
        edges.append((obj.from_id, obj.to_id))
    return nodes, edges
def get_archives(self):
    """Return a posts-per-month aggregate query for the active database.

    Uses the dialect-appropriate date functions; returns None when the
    database backend is neither SQLite nor MySQL.
    """
    database = db.database
    if isinstance(database, peewee.SqliteDatabase):
        sql = ("select strftime('%Y',created) year,"
               "strftime('%m',created) month,"
               "count(id) count from posts group by month")
        return RawQuery(Post, sql)
    if isinstance(database, peewee.MySQLDatabase):
        sql = ("select date_format(created,'%Y') year,"
               "date_format(created,'%m') month,"
               "count(id) count from posts group by month")
        return RawQuery(Post, sql)
    return None
def seed_towns(): print "Seeding Towns tables..." towns_file = 'current/data/uk-towns-list/uk-towns.csv' print towns_file Town.drop_table(fail_silently=True) Town.create_table(fail_silently=True) rq = RawQuery(Town, "COPY uk_towns FROM '{0}' DELIMITERS ',' CSV HEADER".format(towns_file)) try: rq.execute() except TypeError, e: # Wierdness here assert Town.select().count() == 43143
def filter(self, user_input_string):
    """Return distinct last-revision records matching the user's filter.

    The filter expression produced by ``collect_all_filters`` references the
    ``last_revision`` table; it is rewritten to target the query's
    ``"last_revs"`` alias before execution.
    """
    condition = collect_all_filters(user_input_string)
    aliased_condition = condition.replace('last_revision', '"last_revs"')
    sql = """
        SELECT DISTINCT "last_revs".*
        FROM last_revision as last_revs
        LEFT OUTER JOIN record_entity
        ON "last_revs".id = record_entity.revision_id
        {condition}""".format(condition=aliased_condition)
    return RawQuery(RecordRevision, sql).execute()
def fetch_top_by_genre(genre_ids=None, reverse=False, limit=10):
    """Return the top artists by input degree.

    When *genre_ids* is given, restrict to artists in those genres — or
    exclude them instead when *reverse* is true. Yields a list of
    ``(name, degree_input)`` tuples.
    """
    where_clause = ''
    if genre_ids:
        membership = 'NOT IN' if reverse else 'IN'
        id_list = ','.join(str(genre_id) for genre_id in genre_ids)
        where_clause = ('WHERE id %s (SELECT DISTINCT artist_id FROM artist_genre '
                        'WHERE genre_id IN (%s)) ' % (membership, id_list))
    sql = ('SELECT name, degree_input '
           'FROM "artist" %s '
           'ORDER BY degree_input DESC '
           'LIMIT %d' % (where_clause, limit))
    return [(row.name, row.degree_input) for row in RawQuery(Similar, sql).execute()]
def seed_towns(): print "Seeding Towns tables..." towns_file = 'current/data/uk-towns-list/uk-towns.csv' print towns_file Town.drop_table(fail_silently=True) Town.create_table(fail_silently=True) rq = RawQuery( Town, "COPY uk_towns FROM '{0}' DELIMITERS ',' CSV HEADER".format( towns_file)) try: rq.execute() except TypeError, e: # Wierdness here assert Town.select().count() == 43143
async def get_rank(self):
    """Count the users whose balance is strictly greater than this user's.

    The count is the zero-based rank offset of this user on the leaderboard.
    """
    sql = ('SELECT Count(*) FROM "user" AS t1 '
           'WHERE ("t1"."balance"::numeric > {})'.format(self.balance))
    return await database_manager.scalar(RawQuery(User, sql))
def historique_entrees_par_jour():
    """Per-day entry history: count, minimum and average confidence.

    Returns the grouped rows as a list.
    """
    query = RawQuery(
        sql="SELECT COUNT(e.id) as n_entrees, DATE(e.horodatage) as jour, min(e.pourcentage_confiance) as conf_min, AVG(e.pourcentage_confiance) as conf_avg from entree as e group by date(e.horodatage);"
    )
    resultats = query.bind(maBDD).execute()
    return list(resultats)
async def calculate_server_ranks(self):
    """Recalculate the server-wide player ranking table.

    Truncates ``rankings_rank`` and re-inserts one averaged rank per player,
    derived from local-record positions across all maps currently on the
    server. The SQL is MySQL-specific (user variables, TRUNCATE, NOW()).
    """
    # IDs of every map currently loaded on the server.
    maps_on_server = [
        map_on_server.id for map_on_server in self.instance.map_manager.maps
    ]
    minimum_records_required_setting = await self.setting_records_required.get_value()
    # Enforce a floor of 3 ranked records regardless of the configured setting.
    minimum_records_required = minimum_records_required_setting if minimum_records_required_setting >= 3 else 3
    maximum_record_rank = await self.get_maximum_record_rank()
    # The four .format() slots are, in order: the record-rank limit, the
    # minimum ranked records, the active map count, and the map-id list
    # interpolated into the IN (...) clause.
    query = RawQuery(
        Rank, """
        -- Reset the current ranks to insert new ones later one.
        TRUNCATE TABLE rankings_rank;

        -- Limit on maximum ranked records.
        SET @ranked_record_limit = {};

        -- Minimum amount of ranked records required to acquire a rank.
        SET @minimum_ranked_records = {};

        -- Total amount of maps active on the server.
        SET @active_map_count = {};

        -- Set the rank/current rank variables to ensure correct first calculation
        SET @player_rank = 0;
        SET @current_rank = 0;

        INSERT INTO rankings_rank (player_id, average, calculated_at)
        SELECT player_id, average, calculated_at
        FROM (
            SELECT player_id,
                -- Calculation: the sum of the record ranks is combined with the ranked record limit times the amount of unranked maps.
                -- Divide this summed ranking by the amount of active maps on the server, and an average calculated rank will be returned.
                ROUND((SUM(player_rank) + (@active_map_count - COUNT(player_rank)) * @ranked_record_limit) / @active_map_count * 10000, 0) AS average,
                NOW() AS calculated_at,
                COUNT(player_rank) AS ranked_records_count
            FROM (
                SELECT id, map_id, player_id, score,
                    @player_rank := IF(@current_rank = map_id, @player_rank + 1, 1) AS player_rank,
                    @current_rank := map_id
                FROM localrecord
                WHERE map_id IN ({})
                ORDER BY map_id, score ASC
            ) AS ranked_records
            WHERE player_rank <= @ranked_record_limit
            GROUP BY player_id
        ) grouped_ranks
        WHERE ranked_records_count >= @minimum_ranked_records
        """.format(maximum_record_rank, minimum_records_required,
                   str(len(maps_on_server)),
                   ", ".join(str(map_id) for map_id in maps_on_server)))
    await Rank.execute(query)
async def get_delta(ticker, value, column_type):
    """Find the shortest date span over which *column_type* rose by more
    than *value* for *ticker*, and return the Price rows inside that span
    (newest first), each serialized via ``as_json()``.

    NOTE(review): ticker, value and column_type are interpolated straight
    into the SQL via the f-string — an injection risk if any of them come
    from untrusted input. Parameterize ticker/value and whitelist
    column_type against known column names; verify the DB's placeholder
    style before changing.
    """
    sql_query = f'''
    with id as (select id from Ticker where symbol = '{ticker}'),
    prices as (select * from Price join id on id.id = Price.ticker_id)
    select * from Price,
    (select a.date as date_from, b.date as date_to, a.ticker_id as t_id
     from prices as a, prices as b
     where b.{column_type} - a.{column_type} > {value} and b.date > a.date
     order by b.date - a.date limit 1) as c
    where Price.ticker_id=c.t_id and Price.date >= c.date_from and Price.date <= c.date_to
    order by Price.date desc
    '''
    return [r.as_json() for r in await raw_query(RawQuery(Price, sql_query))]
async def get_player_map_ranks(self, player, sort_query=''):
    """Return the maps on which *player* holds a ranked record.

    Only records within the maximum record rank count, and the result is
    restricted to maps currently active on the server. *sort_query* is an
    optional trailing SQL clause (e.g. an ORDER BY).
    """
    maximum_record_rank = await self.get_maximum_record_rank()
    sql = '''SELECT map.id, map.name, map.uid, map.author_login, ranked_records.player_rank
    FROM (
        SELECT id, map_id, player_id, score,
            @player_rank := IF(@current_rank = map_id, @player_rank + 1, 1) AS player_rank,
            @current_rank := map_id
        FROM localrecord r,
            (SELECT @player_rank := 0) pr,
            (SELECT @current_rank := 0) cr
        ORDER BY map_id, score ASC
    ) AS ranked_records
    INNER JOIN map ON map.id = map_id
    WHERE player_rank <= {} AND player_id = {} {}'''.format(
        maximum_record_rank, player.id, sort_query)
    select_query = RawQuery(RankedMap, sql)
    rows = await RankedMap.execute(select_query)
    # Keep only maps that are currently loaded on the server.
    active_ids = [server_map.id for server_map in self.instance.map_manager.maps]
    return [ranked_map for ranked_map in rows if ranked_map.id in active_ids]
def historique_jour_et_nom_rollup():
    """Entry history grouped by day and person name, WITH ROLLUP subtotals.

    Each row carries the day, the person's name, the entry count, and the
    minimum/average confidence percentage.
    """
    query = RawQuery(
        sql="SELECT date(e.horodatage) as jour, p.nom as nom, count(e.id) as n_entrees, min(e.pourcentage_confiance) as conf_min, AVG(e.pourcentage_confiance) as conf_avg from entree as e, personne as p where p.id=e.personne_id group by date(e.horodatage), p.nom with rollup;"
    )
    return query.bind(maBDD).execute()
# get number of iframes # get number of twitter / youtube embeds sql = "SELECT * FROM article a1 INNER JOIN article a2 WHERE a1.url=a2.url" w = Article.select(SQL(sql)) w.count() sql_2 = "SELECT url, count(*) FROM article GROUP BY 1 HAVING count(*) > 0" # w2 = Article.select(Article.url).count().group_ # w2 sql_3 = "SELECT url FROM article WHERE url IN (SELECT url FROM article GROUP BY url HAVING count(*) > 1)" sql_3 = "SELECT url FROM article WHERE website_id = 2" rq = RawQuery(sql_3) aq = Article.select(SQL(sql_3)) i = 0 c = db_connection.db_connection.execute_sql(sql_3) i = 0 for row in c.fetchall(): i += 1 print(row) print(i) for a in aq: i += 1 print(i) ww = Article.select(SQL(sql_3)).execute()
def connect(self):
    """Open the database connection, then turn on SQLite foreign-key checks."""
    super(BTSdb, self).connect()
    # SQLite disables foreign-key enforcement per-connection by default.
    self.execute(RawQuery(BaseModel, "PRAGMA foreign_keys = ON;"))
def update_degree():
    """Recompute every artist's in/out degree from the similarity table."""
    sql = ('UPDATE "artist" SET '
           'degree_output = (SELECT COUNT(*) FROM "similar" WHERE from_id = id), '
           'degree_input = (SELECT COUNT(*) FROM "similar" WHERE to_id = id)')
    RawQuery(Similar, sql).execute()
def __init__(self, *args, **kwargs):
    # Initialise both bases explicitly rather than via super(): the raw
    # query base takes the positional/keyword args, the Future base none.
    # NOTE(review): gen.Future is presumably the tornado Future — confirm.
    BaseRawQuery.__init__(self, *args, **kwargs)
    gen.Future.__init__(self)
    # Pending result future; set later when the query is actually scheduled.
    self._future = None
def get_by_words(cls, words):
    """Return rows whose `words` column contains *words*.

    The search term is wrapped in ``%`` wildcards and bound as a query
    parameter (no SQL interpolation).
    """
    pattern = "%" + words + "%"
    query = RawQuery(cls, "select * from words where words like ?", pattern)
    return query.execute()