def make_coordinate_and_bounding_filters(self, params, properties):
    """Build an RA/DEC bounding-box filter from request parameters.

    'coordinate' and 'bounding' are comma-separated "ra,dec" strings;
    the RA/DEC column names are resolved from the UCD properties.

    Returns:
        A SQLAlchemy ``and_`` expression combining both BETWEEN filters.

    Raises:
        ValueError: if the coordinate/bounding parameters are not defined.
    """
    if not CoaddObjectsDBHelper._is_coordinate_and_bounding_defined(
            params, properties):
        # BUG FIX: the original raised a bare string, which is a
        # TypeError ("exceptions must derive from BaseException") in
        # Python 3; raise a proper exception type instead.
        raise ValueError("Coordinate and bounding filters are not defined.")
    coordinate = params.get('coordinate', None).split(',')
    bounding = params.get('bounding', None).split(',')

    # Resolve the table columns that hold RA and DEC via their UCDs.
    property_ra = properties.get("pos.eq.ra;meta.main", None).lower()
    property_ra_t = DBBase.get_column_obj(self.table, property_ra)
    property_dec = properties.get("pos.eq.dec;meta.main", None).lower()
    property_dec_t = DBBase.get_column_obj(self.table, property_dec)

    ra = float(coordinate[0])
    dec = float(coordinate[1])
    bra = float(bounding[0])
    bdec = float(bounding[1])

    _filters = list()
    _filters.append(
        between(literal_column(str(property_ra_t)),
                literal_column(str(ra - bra)),
                literal_column(str(ra + bra))))
    _filters.append(
        between(literal_column(str(property_dec_t)),
                literal_column(str(dec - bdec)),
                literal_column(str(dec + bdec))))
    return and_(*_filters)
def get_available_venues( start, end, sitting=None ):
    """Return venues with no sitting booked in the period [start, end].

    When ``sitting`` is given, that sitting's own booking is ignored so
    its current venue still counts as available to it.

    Equivalent SQL:
    SELECT * FROM venues WHERE venues.venue_id NOT IN
        (SELECT sitting.venue_id FROM sitting WHERE (<period overlap>)
         AND sitting.venue_id IS NOT NULL)
    """
    session = Session()
    venue_query = session.query(domain.Venue)
    # A sitting collides with the period when either of its endpoints
    # falls inside it, or when it fully encloses the period.
    overlaps = sql.or_(
        sql.between(schema.sitting.c.start_date, start, end),
        sql.between(schema.sitting.c.end_date, start, end),
        sql.between(start, schema.sitting.c.start_date,
                    schema.sitting.c.end_date),
        sql.between(end, schema.sitting.c.start_date,
                    schema.sitting.c.end_date))
    booked = sql.and_(overlaps, schema.sitting.c.venue_id != None)
    if sitting:
        if sitting.sitting_id:
            booked = sql.and_(
                booked,
                schema.sitting.c.sitting_id != sitting.sitting_id)
    venue_query = venue_query.filter(sql.not_(
        schema.venue.c.venue_id.in_(
            sql.select([schema.sitting.c.venue_id]).where(booked))))
    return venue_query.all()
def check_venue_bookings( start, end, venue, sitting=None ):
    """Return every sitting the venue is booked for within [start, end].

    A sitting supplied via ``sitting`` is excluded from the result.
    """
    assert( type(start) == datetime.datetime )
    assert( type(end) == datetime.datetime )
    session = Session()
    # Overlap test: either sitting endpoint inside the period, or the
    # period fully inside the sitting.
    overlaps = sql.or_(
        sql.between(schema.sittings.c.start_date, start, end),
        sql.between(schema.sittings.c.end_date, start, end),
        sql.between(start, schema.sittings.c.start_date,
                    schema.sittings.c.end_date),
        sql.between(end, schema.sittings.c.start_date,
                    schema.sittings.c.end_date))
    booking_filter = sql.and_(
        overlaps, schema.sittings.c.venue_id == venue.venue_id)
    if sitting:
        if sitting.sitting_id:
            booking_filter = sql.and_(
                booking_filter,
                schema.sittings.c.sitting_id != sitting.sitting_id)
    bookings = session.query(BookedVenue).filter(booking_filter).all()
    #session.close()
    return bookings
def get_available_venues( start, end, sitting=None ):
    """Return all venues not booked for any sitting in [start, end].

    If ``sitting`` is passed, its own booking does not make a venue
    unavailable.

    Equivalent SQL: venues whose venue_id is NOT IN the set of
    venue_ids of sittings overlapping the period.
    """
    session = Session()
    query = session.query(domain.Venue)
    # Period-overlap predicate for a sitting row.
    collision = sql.or_(
        sql.between(schema.sitting.c.start_date, start, end),
        sql.between(schema.sitting.c.end_date, start, end),
        sql.between(start, schema.sitting.c.start_date,
                    schema.sitting.c.end_date),
        sql.between(end, schema.sitting.c.start_date,
                    schema.sitting.c.end_date))
    # Only sittings that actually have a venue assigned matter.
    b_filter = sql.and_(collision, schema.sitting.c.venue_id != None)
    if sitting:
        if sitting.sitting_id:
            b_filter = sql.and_(
                b_filter,
                schema.sitting.c.sitting_id != sitting.sitting_id)
    subquery = sql.select([schema.sitting.c.venue_id]).where(b_filter)
    query = query.filter(
        sql.not_(schema.venue.c.venue_id.in_(subquery)))
    return query.all()
def __call__(self, user_ids, session):
    """Compute the rolling-new-active-editor metric.

    Parameters:
        user_ids : list of mediawiki user ids to restrict computation to
        session  : sqlalchemy session open on a mediawiki database

    Returns:
        dict mapping user id -> {metric id: 1} for users registered in
        the rolling window with enough edits; when ``user_ids`` is given,
        every requested id is present (default result if not qualifying).
    """
    edit_threshold = int(self.number_of_edits.data)
    window_days = int(self.rolling_days.data)
    end_date = self.end_date.data
    start_date = end_date - timedelta(days=window_days)

    # Accounts created inside the rolling window.
    newly_registered = session.query(Logging.log_user) \
        .filter(Logging.log_type == 'newusers') \
        .filter(Logging.log_action == 'create') \
        .filter(between(Logging.log_timestamp, start_date, end_date))
    filtered_new = self.filter(
        newly_registered, user_ids, column=Logging.log_user
    ).subquery()

    rev_user = label('user_id', Revision.rev_user)
    ar_user = label('user_id', Archive.ar_user)
    count = label('count', func.count())

    # Per-user edit counts from live and archived revisions.
    revisions = session.query(rev_user, count)\
        .filter(between(Revision.rev_timestamp, start_date, end_date))\
        .filter(Revision.rev_user.in_(filtered_new))\
        .group_by(Revision.rev_user)
    archived = session.query(ar_user, count)\
        .filter(between(Archive.ar_timestamp, start_date, end_date))\
        .filter(Archive.ar_user.in_(filtered_new))\
        .group_by(Archive.ar_user)

    # Bots are excluded from the metric.
    bot_user_ids = session.query(MediawikiUserGroups.ug_user)\
        .filter(MediawikiUserGroups.ug_group == 'bot')\
        .subquery()

    new_edits = revisions.union_all(archived).subquery()
    qualifying = session.query(new_edits.c.user_id)\
        .filter(new_edits.c.user_id.notin_(bot_user_ids))\
        .group_by(new_edits.c.user_id)\
        .having(func.SUM(new_edits.c.count) >= edit_threshold)

    metric_results = {row[0]: {self.id: 1} for row in qualifying.all()}
    if user_ids is None:
        return metric_results
    return {
        uid: metric_results.get(uid, self.default_result)
        for uid in user_ids
    }
def __call__(self, user_ids, session):
    """Rolling-new-active-editor metric.

    Parameters:
        user_ids : list of mediawiki user ids to restrict computation to
        session  : sqlalchemy session open on a mediawiki database

    Returns:
        { user id: {metric id: 1} } for rolling new active editors; when
        a user list was supplied, non-qualifying users get the default.
    """
    required_edits = int(self.number_of_edits.data)
    days = int(self.rolling_days.data)
    end_date = self.end_date.data
    start_date = end_date - timedelta(days=days)

    # Step 1: users whose accounts were created in the window.
    created_in_window = session.query(Logging.log_user) \
        .filter(Logging.log_type == 'newusers') \
        .filter(Logging.log_action == 'create') \
        .filter(between(Logging.log_timestamp, start_date, end_date))
    filtered_new = self.filter(created_in_window, user_ids,
                               column=Logging.log_user).subquery()

    # Step 2: count their edits (live + archived) in the same window.
    rev_user = label('user_id', Revision.rev_user)
    ar_user = label('user_id', Archive.ar_user)
    count = label('count', func.count())

    live_counts = session.query(rev_user, count)\
        .filter(between(Revision.rev_timestamp, start_date, end_date))\
        .filter(Revision.rev_user.in_(filtered_new))\
        .group_by(Revision.rev_user)
    archived_counts = session.query(ar_user, count)\
        .filter(between(Archive.ar_timestamp, start_date, end_date))\
        .filter(Archive.ar_user.in_(filtered_new))\
        .group_by(Archive.ar_user)

    # Step 3: drop bots, sum both sources, apply the edit threshold.
    bot_user_ids = session.query(MediawikiUserGroups.ug_user)\
        .filter(MediawikiUserGroups.ug_group == 'bot')\
        .subquery()
    new_edits = live_counts.union_all(archived_counts).subquery()
    active = session.query(new_edits.c.user_id)\
        .filter(new_edits.c.user_id.notin_(bot_user_ids))\
        .group_by(new_edits.c.user_id)\
        .having(func.SUM(new_edits.c.count) >= required_edits)

    metric_results = {r[0]: {self.id: 1} for r in active.all()}
    if user_ids is None:
        return metric_results
    return {
        uid: metric_results.get(uid, self.default_result)
        for uid in user_ids
    }
def _create_stm(self, params):
    """Build the SELECT statement for ``self.table`` from request params.

    Supports column selection ('columns'), an RA/DEC bounding-box filter
    ('coordinate' + 'bounding'), a magnitude cut ('maglim') and a row
    limit ('limit', default 1000).
    """
    # Pagination parameter.
    limit = params.get('limit', 1000)

    # Ordering parameter (read but not applied; kept for compatibility).
    ordering = params.get('ordering', None)

    # Column selection: lower-cased column names, empty means all.
    self.str_columns = list()
    if params.get('columns', None) is not None:
        clmns = params.get('columns', None).split(',')
        for clmn in clmns:
            self.str_columns.append(clmn.lower())
    columns = DBBase.create_columns_sql_format(self.table,
                                               self.str_columns)

    # BUG FIX: the original called .split(',') on these values before
    # the `if coordinate and bounding` guard, so a missing parameter
    # raised AttributeError on None. Split only after the check.
    coordinate = params.get('coordinate', None)
    bounding = params.get('bounding', None)

    filters = list()
    if coordinate and bounding:
        coordinate = coordinate.split(',')
        bounding = bounding.split(',')
        property_ra_t = DBBase.get_column_obj(self.table, 'ra')
        property_dec_t = DBBase.get_column_obj(self.table, 'dec')
        ra = float(coordinate[0])
        dec = float(coordinate[1])
        bra = float(bounding[0])
        bdec = float(bounding[1])
        _filters = list()
        _filters.append(
            between(literal_column(str(property_ra_t)),
                    literal_column(str(ra - bra)),
                    literal_column(str(ra + bra))))
        _filters.append(
            between(literal_column(str(property_dec_t)),
                    literal_column(str(dec - bdec)),
                    literal_column(str(dec + bdec))))
        filters.append(and_(*_filters))

    maglim = params.get('maglim', None)
    if maglim is not None:
        # TODO: the magnitude column is still hardcoded ('mag_auto_i').
        maglim = float(maglim)
        mag_t = DBBase.get_column_obj(self.table, 'mag_auto_i')
        filters.append(
            literal_column(str(mag_t)) <= literal_column(str(maglim)))

    stm = select(columns).select_from(self.table).where(and_(*filters))
    if limit:
        stm = stm.limit(literal_column(str(limit)))
    return stm
def listenerdata(start, stop):
    """Return listener statistics per stream plus shows for [start, stop].

    ``start``/``stop`` arrive as datetime strings; the response is a JSON
    object with per-mount (timestamp, listener-count) pairs in 'data' and
    show intervals in 'shows'.
    """
    from rfk.site import app
    app.logger.warn(start)
    app.logger.warn(stop)
    stop = parse_datetimestring(stop)
    start = parse_datetimestring(start)
    app.logger.warn(start)
    app.logger.warn(stop)
    ret = {'data': {}, 'shows': []}
    streams = Stream.query.all()
    for stream in streams:
        ret['data'][str(stream.mount)] = []
        #just set an initial stating point from before the starting point
        # (last known value at or before `start`, so the graph has a
        # left edge; 0 when there is no earlier statistic)
        stats = stream.statistic.get(stop=start, num=1, reverse=True)
        c = 0
        for stat in stats:
            c = stat.value
        if not stats:
            c = 0
        ret['data'][str(stream.mount)].append(
            (to_timestamp(to_user_timezone(start)), int(c)))
    #fill in the actual datapoints
    streams = Stream.query.all()
    for stream in streams:
        stats = stream.statistic.get(start=start, stop=stop)
        for stat in stats:
            ret['data'][str(stream.mount)].append(
                (to_timestamp(to_user_timezone(stat.timestamp)),
                 int(stat.value)))
    # Append a closing datapoint at `stop` (last known value), mirroring
    # the initial point, so every series spans the full window.
    streams = Stream.query.all()
    for stream in streams:
        stats = stream.statistic.get(stop=stop, num=1, reverse=True)
        for stat in stats:
            c = stat.value
        if not stats:
            c = 0
        ret['data'][str(stream.mount)].append(
            (to_timestamp(to_user_timezone(stop)), int(c)))
    #get the shows for the graph
    shows = Show.query.filter(between(Show.begin, start, stop) \
        | between(Show.end, start, stop)).order_by(Show.begin.asc()).all()
    for show in shows:
        sstart = to_timestamp(to_user_timezone(show.begin))
        if show.end:
            send = to_timestamp(to_user_timezone(show.end))
        else:
            # still-running show: extend it to "now"
            send = to_timestamp(to_user_timezone(now()))
        ret['shows'].append({'name': show.name, 'b': sstart, 'e': send})
    return jsonify(ret)
def listenerdata(start, stop):
    """Return per-stream listener counts and show intervals for a period.

    ``start``/``stop`` are datetime strings; returns JSON with 'data'
    (per-mount lists of (timestamp, count) pairs) and 'shows'.
    """
    from rfk.site import app
    app.logger.warn(start)
    app.logger.warn(stop)
    stop = parse_datetimestring(stop)
    start = parse_datetimestring(start)
    app.logger.warn(start)
    app.logger.warn(stop)
    ret = {'data': {}, 'shows': []}
    streams = Stream.query.all()
    for stream in streams:
        ret['data'][str(stream.mount)] = []
        #just set an initial stating point from before the starting point
        # (most recent statistic at or before `start`; defaults to 0)
        stats = stream.statistic.get(stop=start, num=1, reverse=True)
        c = 0
        for stat in stats:
            c = stat.value
        if not stats:
            c = 0
        ret['data'][str(stream.mount)].append((to_timestamp(to_user_timezone(start)), int(c)))
    #fill in the actual datapoints
    streams = Stream.query.all()
    for stream in streams:
        stats = stream.statistic.get(start=start, stop=stop)
        for stat in stats:
            ret['data'][str(stream.mount)].append(
                (to_timestamp(to_user_timezone(stat.timestamp)),
                 int(stat.value)))
    # Closing datapoint at `stop` so each series covers the full window.
    streams = Stream.query.all()
    for stream in streams:
        stats = stream.statistic.get(stop=stop, num=1, reverse=True)
        for stat in stats:
            c = stat.value
        if not stats:
            c = 0
        ret['data'][str(stream.mount)].append((to_timestamp(to_user_timezone(stop)), int(c)))
    #get the shows for the graph
    shows = Show.query.filter(between(Show.begin, start, stop) \
        | between(Show.end, start, stop)).order_by(Show.begin.asc()).all()
    for show in shows:
        sstart = to_timestamp(to_user_timezone(show.begin))
        if show.end:
            send = to_timestamp(to_user_timezone(show.end))
        else:
            # open-ended show: use the current time as its end
            send = to_timestamp(to_user_timezone(now()))
        ret['shows'].append({'name': show.name, 'b': sstart, 'e': send})
    return jsonify(ret)
def __call__(self, user_ids, session):
    """Rolling-active-editor metric.

    Parameters:
        user_ids : list of mediawiki user ids to restrict computation to
        session  : sqlalchemy session open on a mediawiki database

    Returns:
        dict from user id to {metric id: 1} for qualifying editors; when
        ``user_ids`` is given, every id is present (default otherwise).
    """
    required_edits = int(self.number_of_edits.data)
    days = int(self.rolling_days.data)
    end_date = self.end_date.data
    start_date = end_date - timedelta(days=days)

    rev_user = label('user_id', Revision.rev_user)
    ar_user = label('user_id', Archive.ar_user)
    count = label('count', func.count())

    # Edit counts per user from live revisions in the window.
    revisions = session.query(rev_user, count)\
        .filter(between(Revision.rev_timestamp, start_date, end_date))\
        .group_by(Revision.rev_user)
    revisions = self.filter(revisions, user_ids,
                            column=Revision.rev_user)

    # Same for archived (deleted-page) revisions.
    archived = session.query(ar_user, count)\
        .filter(between(Archive.ar_timestamp, start_date, end_date))\
        .group_by(Archive.ar_user)
    archived = self.filter(archived, user_ids, column=Archive.ar_user)

    # Bots never qualify.
    bot_user_ids = session.query(MediawikiUserGroups.ug_user)\
        .filter(MediawikiUserGroups.ug_group == 'bot')\
        .subquery()

    edits = revisions.union_all(archived).subquery()
    qualifying = session.query(edits.c.user_id)\
        .filter(edits.c.user_id.notin_(bot_user_ids))\
        .group_by(edits.c.user_id)\
        .having(func.SUM(edits.c.count) >= required_edits)

    metric_results = {row[0]: {self.id: 1} for row in qualifying.all()}
    if user_ids is None:
        return metric_results
    return {
        uid: metric_results.get(uid, self.default_result)
        for uid in user_ids
    }
def recent_games_q(server_id=None, map_id=None, player_id=None,
                   game_type_cd=None, cutoff=None, force_player_id=False):
    '''Returns a SQLA query of recent game data.

    Parameters filter the results returned if they are provided. If not,
    it is assumed that results from all servers and maps is desired. The
    ``cutoff`` parameter (a datetime) limits how far back to look: only
    games on or after the cutoff are returned.
    '''
    pgstat_alias = aliased(PlayerGameStat, name='pgstat_alias')

    q = DBSession.query(
        Game.game_id, GameType.game_type_cd, Game.winner,
        Game.start_dt, GameType.descr.label('game_type_descr'),
        Server.server_id, Server.name.label('server_name'),
        Map.map_id, Map.name.label('map_name'),
        PlayerGameStat.player_id, PlayerGameStat.nick,
        PlayerGameStat.rank, PlayerGameStat.team,
        PlayerGameStat.elo_delta)
    # Join conditions expressed as filters, newest games first.
    q = q.filter(Game.server_id == Server.server_id)
    q = q.filter(Game.map_id == Map.map_id)
    q = q.filter(Game.game_id == PlayerGameStat.game_id)
    q = q.filter(Game.game_type_cd == GameType.game_type_cd)
    q = q.order_by(expr.desc(Game.game_id))

    # Optional filters are tacked on to the query.
    if server_id is not None:
        q = q.filter(Server.server_id == server_id)

    if map_id is not None:
        q = q.filter(Map.map_id == map_id)

    # force_player_id makes the returned pgstat row be the one of the
    # specified player; otherwise we look for games *having* that player
    # but return the #1 player's pgstat row.
    if player_id is not None:
        if force_player_id:
            q = q.filter(PlayerGameStat.player_id == player_id)
        else:
            q = q.filter(PlayerGameStat.scoreboardpos == 1)\
                 .filter(Game.game_id == pgstat_alias.game_id)\
                 .filter(pgstat_alias.player_id == player_id)
    else:
        q = q.filter(PlayerGameStat.scoreboardpos == 1)

    if game_type_cd is not None:
        q = q.filter(Game.game_type_cd == game_type_cd.lower())

    if cutoff is not None:
        right_now = datetime.utcnow()
        q = q.filter(expr.between(Game.create_dt, cutoff, right_now))

    return q
def get_unavailable_resources(start, end):
    """Return all resources that are booked in the period [start, end]."""
    assert (type(start) == datetime.datetime)
    assert (type(end) == datetime.datetime)
    session = Session()
    # Booking overlaps the period when either endpoint of the sitting
    # is inside it, or the period lies inside the sitting.
    overlap = sql.or_(
        sql.between(schema.sittings.c.start_date, start, end),
        sql.between(schema.sittings.c.end_date, start, end),
        sql.between(start, schema.sittings.c.start_date,
                    schema.sittings.c.end_date),
        sql.between(end, schema.sittings.c.start_date,
                    schema.sittings.c.end_date))
    return session.query(BookedResources).filter(overlap).all()
def getFilter(date):
    """Filter matching groups active on `date`.

    A group matches when `date` lies inside its start/end range, or when
    it started on/before `date` and has no end date (still running).
    """
    in_range = sql.between(
        date, schema.group.c.start_date, schema.group.c.end_date)
    open_ended = sql.and_(
        schema.group.c.start_date <= date,
        schema.group.c.end_date == None)
    return sql.or_(in_range, open_ended)
def list_files(request):
    """LIST all the files uploaded in the last 5 hours.

    Returns a list of dicts describing each file (empty list when there
    are no recent uploads).
    """
    from sqlalchemy.sql.expression import between
    from datetime import datetime, timedelta
    session = DBSession()
    now = datetime.now()
    # timeframe could be selectable from an interface for instance
    # easy to implement: analyze pars or set default if missing...
    timeframe = {'hours': 5, 'minutes': 0, 'seconds': 0}
    delta = now - timedelta(**timeframe)
    result = session.query(MyModel)\
        .filter(between(MyModel.timestamp, delta, now))\
        .order_by(MyModel.timestamp.desc())
    # FIX: build the list directly from the query iterator. The original
    # issued an extra COUNT(*) via result.count() just to decide between
    # a loop and [], and left a Python-2 debug `print res.path` behind.
    # An empty query naturally yields an empty list (a 204 response
    # could still be added by the caller).
    return [{'id': res.id,
             'file_name': res.name,
             'file_size': res.size,
             'file_type': res.filetype,
             # TODO(review): hardcoded host/port — derive from request
             'download_url': 'http://localhost:9000/getfile/%s' % res.id}
            for res in result]
def top_maps_by_times_played_q(cutoff_days, region=None, game_type_cd=None):
    """Query for the most-played maps within the last `cutoff_days` days.

    Optional `region` and `game_type_cd` narrow the result; games older
    than the cutoff are ignored.
    """
    # Only games played inside [cutoff_dt, right_now] are considered.
    right_now = datetime.utcnow()
    cutoff_dt = right_now - timedelta(days=cutoff_days)

    q = DBSession.query(Game.map_id, Map.name, func.count())
    q = q.filter(Map.map_id == Game.map_id)
    q = q.filter(expr.between(Game.create_dt, cutoff_dt, right_now))
    q = q.order_by(expr.desc(func.count()))
    q = q.group_by(Game.map_id).group_by(Map.name)

    if region and region != "" and region != "0":
        q = q.filter(Server.region == region)\
             .filter(Server.server_id == Game.server_id)
    if game_type_cd and game_type_cd != "":
        q = q.filter(Game.game_type_cd == game_type_cd)

    return q
def top_maps_by_times_played_q(cutoff_days, region = None, game_type_cd = None):
    """Build a query of maps ranked by play count over a recent window.

    Games older than `cutoff_days` days are excluded; `region` and
    `game_type_cd` act as optional extra filters.
    """
    # Window of eligible games: the last `cutoff_days` days.
    right_now = datetime.utcnow()
    cutoff_dt = right_now - timedelta(days=cutoff_days)

    top_maps_q = (DBSession.query(Game.map_id, Map.name, func.count())
                  .filter(Map.map_id == Game.map_id)
                  .filter(expr.between(Game.create_dt, cutoff_dt, right_now))
                  .order_by(expr.desc(func.count()))
                  .group_by(Game.map_id)
                  .group_by(Map.name))

    # "0" is the sentinel for "all regions".
    if region and region != "" and region != "0":
        top_maps_q = top_maps_q.filter(Server.region == region)
        top_maps_q = top_maps_q.filter(Server.server_id == Game.server_id)

    if game_type_cd and game_type_cd != "":
        top_maps_q = top_maps_q.filter(Game.game_type_cd == game_type_cd)

    return top_maps_q
def __call__(self, user_ids, session):
    """Rolling-active-editor metric.

    Parameters:
        user_ids : list of mediawiki user ids to restrict computation to
        session  : sqlalchemy session open on a mediawiki database

    Returns:
        dictionary from user ids to {metric id: 1} for active editors;
        non-qualifying requested users map to the default result.
    """
    threshold = int(self.number_of_edits.data)
    window = int(self.rolling_days.data)
    end_date = self.end_date.data
    start_date = end_date - timedelta(days=window)

    rev_user = label('user_id', Revision.rev_user)
    ar_user = label('user_id', Archive.ar_user)
    count = label('count', func.count())

    # Count live revisions per user inside the window.
    revisions = session.query(rev_user, count)\
        .filter(between(Revision.rev_timestamp, start_date, end_date))\
        .group_by(Revision.rev_user)
    revisions = self.filter(revisions, user_ids, column=Revision.rev_user)

    # And archived revisions, which also count towards activity.
    archived = session.query(ar_user, count)\
        .filter(between(Archive.ar_timestamp, start_date, end_date))\
        .group_by(Archive.ar_user)
    archived = self.filter(archived, user_ids, column=Archive.ar_user)

    # Exclude anyone in the 'bot' group.
    bot_user_ids = session.query(MediawikiUserGroups.ug_user)\
        .filter(MediawikiUserGroups.ug_group == 'bot')\
        .subquery()

    edits = revisions.union_all(archived).subquery()
    active_editors = session.query(edits.c.user_id)\
        .filter(edits.c.user_id.notin_(bot_user_ids))\
        .group_by(edits.c.user_id)\
        .having(func.SUM(edits.c.count) >= threshold)

    metric_results = {row[0]: {self.id: 1}
                      for row in active_editors.all()}
    if user_ids is None:
        return metric_results
    return {
        uid: metric_results.get(uid, self.default_result)
        for uid in user_ids
    }
def get_unavailable_resources(start, end):
    """Get all resources booked during the period [start, end]."""
    assert(type(start) == datetime.datetime)
    assert(type(end) == datetime.datetime)
    session = Session()
    # A booking is relevant when its sitting overlaps the period:
    # either endpoint inside it, or enclosing it entirely.
    period_overlap = sql.or_(
        sql.between(schema.sitting.c.start_date, start, end),
        sql.between(schema.sitting.c.end_date, start, end),
        sql.between(start, schema.sitting.c.start_date,
                    schema.sitting.c.end_date),
        sql.between(end, schema.sitting.c.start_date,
                    schema.sitting.c.end_date))
    booked = session.query(BookedResources).filter(period_overlap)
    return booked.all()
def detalleNombre(self, dia, nombre):
    """Return the receipts of the given day for clients matching `nombre`."""
    day_start = datetime(dia.year, dia.month, dia.day, 0, 0)
    day_end = datetime(dia.year, dia.month, dia.day, 23, 59)
    query = model.Recibo.query.filter(
        model.Recibo.cliente.like(u"%{0}%".format(nombre)))
    query = query.filter(between(model.Recibo.dia, day_start, day_end))
    return dict(recibos=query.all())
def iplatlondel():
    """Yield KML placemark-deletion fragments for recently expired hits.

    Selects server hits whose insert time falls in the window between
    8 and 2 seconds ago.
    """
    # If only google actually supported sessions! Darn them!
    # if 'oldolddatetime' in session:
    #     serverhits=m.ServerHit.query.filter(between(m.ServerHit.insdatetime,session.get('oldolddatetime'),session.get('olddatetime'))).limit(4000).all()
    window_start = datetime.datetime.now() - datetime.timedelta(seconds=8)
    window_end = datetime.datetime.now() - datetime.timedelta(seconds=2)
    hits = m.ServerHit.query.filter(
        between(m.ServerHit.insdatetime, window_start, window_end)).all()
    for hit in hits:
        yield {
            'del': '<Placemark targetId="A' + str(hit.id) + '"></Placemark>'
        }
def resolve_schedule(self, info, **args):
    """Resolve schedule entries for a room over the week ending on `day`."""
    room = args.get("room")
    last_day = args.get("day")
    first_day = last_day - timedelta(days=6)
    # The entry is relevant when its semester range overlaps the week:
    # either week boundary inside the semester, or a semester boundary
    # inside the week.
    week_overlap = or_(
        between(last_day, ScheduleModel.sem_beg, ScheduleModel.sem_end),
        between(first_day, ScheduleModel.sem_beg, ScheduleModel.sem_end),
        between(ScheduleModel.sem_beg, first_day, last_day),
        between(ScheduleModel.sem_end, first_day, last_day))
    query = Schedule.get_query(info)
    query = query.filter(ScheduleModel.id_rooms == room)
    return query.filter(week_overlap).all()
def resolve_test(self, info, **args):
    """Resolve schedule entries active on `day` at time `now` for a facility."""
    day = args.get("day")
    facility = args.get("facility")
    now = args.get("now")
    query = Schedule.get_query(info).join(StudytimeModel)
    query = query.join(RoomsModel, CorpustypeModel)
    query = query.filter(CorpustypeModel.id == facility)
    # `day` must fall inside the semester and `now` inside the slot.
    query = query.filter(
        between(day, ScheduleModel.sem_beg, ScheduleModel.sem_end))
    query = query.filter(
        between(now, StudytimeModel.time_start, StudytimeModel.time_end))
    return query.all()
def resolve_filter_group(self, info, **args):
    """Resolve schedule entries for a group over the week ending on `day`."""
    last_day = args.get("day")
    group = args.get("group")
    first_day = last_day - timedelta(days=6)
    # Semester range and requested week must overlap.
    week_overlap = or_(
        between(last_day, ScheduleModel.sem_beg, ScheduleModel.sem_end),
        between(first_day, ScheduleModel.sem_beg, ScheduleModel.sem_end),
        between(ScheduleModel.sem_beg, first_day, last_day),
        between(ScheduleModel.sem_end, first_day, last_day))
    query = Schedule.get_query(info).join(GroupshModel)
    query = query.filter(GroupshModel.id == group)
    return query.filter(week_overlap).all()
def dia(self, dia):
    """Show the receipts issued on the given day."""
    day_start = datetime(dia.year, dia.month, dia.day, 0, 0)
    day_end = datetime(dia.year, dia.month, dia.day, 23, 59)
    recibos = model.Recibo.query.filter(
        between(model.Recibo.dia, day_start, day_end)).all()
    return dict(recibos=recibos, dia=dia)
def check_bookings(start, end, resource):
    """Return all sittings the resource is booked for within [start, end]."""
    assert (type(resource) == domain.Resource)
    assert (type(start) == datetime.datetime)
    assert (type(end) == datetime.datetime)
    session = Session()
    # Overlap: a sitting endpoint inside the period, or the period fully
    # inside the sitting.
    overlap = sql.or_(
        sql.between(schema.sittings.c.start_date, start, end),
        sql.between(schema.sittings.c.end_date, start, end),
        sql.between(start, schema.sittings.c.start_date,
                    schema.sittings.c.end_date),
        sql.between(end, schema.sittings.c.start_date,
                    schema.sittings.c.end_date))
    booking_filter = sql.and_(
        schema.resources.c.resource_id == resource.resource_id, overlap)
    return session.query(BookedResources).filter(booking_filter).all()
def get_sittings(self):
    """Return public sittings in [self.start_date, self.end_date],
    grouped by day.

    Result is a list of {'day': <formatted date>, 'sittings': [...]}
    dicts, one per calendar day that has sittings.
    """
    formatter = self.request.locale.dates.getFormatter("date", "full")
    session = Session()
    query = (
        session.query(domain.GroupSitting)
        .filter(
            sql.and_(
                schema.sittings.c.status.in_(get_states("groupsitting", tagged=["public"])),
                sql.between(schema.sittings.c.start_date, self.start_date, self.end_date),
            )
        )
        .order_by(schema.sittings.c.start_date)
        .options(
            eagerload("group"),
            # eagerload('sitting_type'),
            eagerload("item_schedule"),
            eagerload("item_schedule.item"),
        )
    )
    sittings = query.all()
    day = u""
    day_list = []
    s_dict = {}
    # Sittings arrive ordered by start date; start a new day bucket each
    # time the formatted day changes, flushing the previous bucket.
    for sitting in sittings:
        sday = formatter.format(sitting.start_date)
        if sday != day:
            s_list = []
            day = sday
            if s_dict:
                day_list.append(s_dict)
            s_dict = {}
        # Build the detail URL based on the owning group's type.
        if sitting.group.type == "parliament":
            _url = url.set_url_context("/business/sittings/obj-%i" % (sitting.sitting_id))
        elif sitting.group.type == "committee":
            _url = url.set_url_context(
                "/business/committees/obj-%i/sittings/obj-%i"
                % (sitting.group.group_id, sitting.sitting_id)
            )
        else:
            _url = "#"
        s_list.append(
            {
                "start": sitting.start_date.strftime("%H:%M"),
                "end": sitting.end_date.strftime("%H:%M"),
                "type": sitting.group.type,
                "name": sitting.group.short_name,
                "url": _url,
                "items": self.get_sitting_items(sitting),
            }
        )
        s_dict["day"] = day
        s_dict["sittings"] = s_list
    else:
        # for/else: runs after the loop completes — flush the last bucket.
        if s_dict:
            day_list.append(s_dict)
    return day_list
def group_sittings_filter(self):
    """SQL filter: sittings in the agenda period with private states."""
    if self.end_date:
        date_expr = sql.between(
            schema.group_sittings.c.start_date,
            self.start_date, self.end_date)
    else:
        # Open-ended period: everything from start_date onwards.
        date_expr = (
            schema.group_sittings.c.start_date >= self.start_date)
    status_expr = schema.group_sittings.c.status.in_(
        self._agenda_private_state_ids)
    return sql.and_(status_expr, date_expr)
def on_message(self, message):
    """Handle a websocket message of the form 'command|payload...'.

    Supported commands:
        criterion|<name>|<value> : build a Visit filter and start a query
        more                     : resume a paused cursor, fetch one more
        /status                  : report the state of every pool connection
    """
    command = message.split('|')[0]
    query = '|'.join(message.split('|')[1:])
    if command == 'criterion':
        criterion = query.split('|')[0]
        value = '|'.join(query.split('|')[1:])
        if criterion == 'date':
            # Try full timestamp, then date-only, then fall back to now.
            try:
                value = datetime.strptime(
                    value.replace('+', ' '), '%Y-%m-%d %H:%M:%S')
            except ValueError:
                try:
                    # BUG FIX: the original called
                    # datetime.strptime('%Y-%m-%d') without the value,
                    # which raises TypeError (never caught by the
                    # except ValueError below). Parse the value itself.
                    value = datetime.strptime(
                        value.replace('+', ' '), '%Y-%m-%d')
                except ValueError:
                    value = datetime.now()
            filter_ = between(Visit.date, value.date(),
                              value.date() + timedelta(days=1))
        elif criterion in (
                'referrer', 'asn', 'browser_name', 'site',
                'browser_version', 'browser_name_version', 'query'):
            # Substring match for free-text criteria.
            filter_ = getattr(Visit, criterion).ilike('%%%s%%' % value)
        else:
            # Exact (case-insensitive) match for everything else.
            filter_ = func.lower(
                getattr(Visit, criterion)) == value.lower()
        query = (self.db
                 .query(Visit)
                 .filter(filter_))
        dialect = query.session.bind.dialect
        compiler = SQLCompiler(dialect, query.statement)
        compiler.compile()
        self.count = 0
        self.stop = 20
        self.state = 'start'
        self.execute(compiler.string, compiler.params)
    elif command == 'more':
        if self.state == 'paused':
            self.stop += 20
            self.state = 'executing'
            self.cursor.execute(
                'FETCH FORWARD 1 FROM visit_cur;')
    elif command == '/status':
        for i, conn in enumerate(adb._pool):
            if conn.busy():
                self.write_message(
                    'INFO|Connection %d is busy: '
                    'Executing? %s Closed? %d Status? %s (%d)' % (
                        i, conn.connection.isexecuting(),
                        conn.connection.closed,
                        conn.connection.get_transaction_status(),
                        conn.connection.get_backend_pid()))
            else:
                self.write_message('INFO|Connection %d is free' % i)
def resolve_filter_teacher(self, info, **args):
    """Resolve schedule entries for a teacher over the week ending on `day`."""
    last_day = args.get("day")
    teacher = args.get("teacher")
    first_day = last_day - timedelta(days=6)
    # Entry matters only when its semester range overlaps the week.
    week_overlap = or_(
        between(last_day, ScheduleModel.sem_beg, ScheduleModel.sem_end),
        between(first_day, ScheduleModel.sem_beg, ScheduleModel.sem_end),
        between(ScheduleModel.sem_beg, first_day, last_day),
        between(ScheduleModel.sem_end, first_day, last_day))
    query = Schedule.get_query(info).join(PersonnelModel)
    query = query.join(TeachersModel)
    query = query.filter(TeachersModel.id == teacher)
    return query.filter(week_overlap).all()
def sitting_filter(self):
    """SQL filter: public-state sittings within the agenda period."""
    if self.end_date:
        date_expr = sql.between(
            schema.sitting.c.start_date, self.start_date, self.end_date)
    else:
        # No end date means an open-ended agenda period.
        date_expr = (schema.sitting.c.start_date >= self.start_date)
    status_expr = schema.sitting.c.status.in_(
        self._agenda_public_state_ids)
    return sql.and_(status_expr, date_expr)
def check_bookings(start, end, resource):
    """Return all sittings the given resource is booked for in the period."""
    assert(type(resource) == domain.Resource)
    assert(type(start) == datetime.datetime)
    assert(type(end) == datetime.datetime)
    session = Session()
    # Period-overlap predicate for the sitting.
    overlap = sql.or_(
        sql.between(schema.sitting.c.start_date, start, end),
        sql.between(schema.sitting.c.end_date, start, end),
        sql.between(start, schema.sitting.c.start_date,
                    schema.sitting.c.end_date),
        sql.between(end, schema.sitting.c.start_date,
                    schema.sitting.c.end_date))
    b_filter = sql.and_(
        schema.resources.c.resource_id == resource.resource_id,
        overlap)
    bookings = session.query(BookedResources).filter(b_filter).all()
    return bookings
def get_unavailable_venues(start, end, sitting=None):
    """Return venue bookings overlapping [start, end].

    A sitting supplied via ``sitting`` is excluded from the result.
    """
    assert type(start) == datetime.datetime
    assert type(end) == datetime.datetime
    session = Session()
    # Either sitting endpoint in the period, or period inside sitting.
    b_filter = sql.or_(
        sql.between(schema.group_sittings.c.start_date, start, end),
        sql.between(schema.group_sittings.c.end_date, start, end),
        sql.between(start, schema.group_sittings.c.start_date,
                    schema.group_sittings.c.end_date),
        sql.between(end, schema.group_sittings.c.start_date,
                    schema.group_sittings.c.end_date))
    if sitting:
        if sitting.group_sitting_id:
            b_filter = sql.and_(
                b_filter,
                schema.group_sittings.c.group_sitting_id
                != sitting.group_sitting_id)
    venues = session.query(BookedVenue).filter(b_filter).all()
    # session.close()
    return venues
def diaCasa(self, dia, casa):
    """Show the receipts of a single day for one house."""
    casa = model.Casa.get(casa)
    day_start = datetime(dia.year, dia.month, dia.day, 0, 0)
    day_end = datetime(dia.year, dia.month, dia.day, 23, 59)
    query = model.Recibo.query.filter_by(casa=casa)
    recibos = query.filter(
        between(model.Recibo.dia, day_start, day_end)).all()
    return dict(recibos=recibos, dia=dia, casa=casa)
def get_sittings(self):
    """Return public group sittings in [self.start_date, self.end_date],
    bucketed per day as {'day': ..., 'sittings': [...]} dicts.
    """
    formatter = self.request.locale.dates.getFormatter('date', 'full')
    session = Session()
    query = session.query(domain.GroupSitting).filter(
        sql.and_(
            schema.group_sittings.c.status.in_(get_states('groupsitting',
                                                          tagged=['public'])),
            sql.between(
                schema.group_sittings.c.start_date,
                self.start_date,
                self.end_date))).order_by(
        schema.group_sittings.c.start_date).options(
        eagerload('group'),
        #eagerload('sitting_type'),
        eagerload('item_schedule'),
        eagerload('item_schedule.item')
    )
    sittings = query.all()
    day = u''
    day_list = []
    s_dict = {}
    # Rows are ordered by start date; open a new per-day bucket whenever
    # the formatted day changes, flushing the previous one.
    for sitting in sittings:
        sday = formatter.format(sitting.start_date)
        if sday != day:
            s_list = []
            day = sday
            if s_dict:
                day_list.append(s_dict)
            s_dict = {}
        # Detail URL depends on the owning group's type.
        if sitting.group.type == 'parliament':
            _url = url.set_url_context('/business/sittings/obj-%i' % (
                sitting.group_sitting_id))
        elif sitting.group.type == 'committee':
            _url = url.set_url_context(
                '/business/committees/obj-%i/sittings/obj-%i'
                % (sitting.group.group_id, sitting.group_sitting_id))
        else:
            _url ='#'
        s_list.append({
            'start': sitting.start_date.strftime("%H:%M"),
            'end' : sitting.end_date.strftime("%H:%M"),
            'type' : sitting.group.type,
            'name' : sitting.group.short_name,
            'url' : _url,
            'items' : self.get_sitting_items(sitting),
        })
        s_dict['day'] = day
        s_dict['sittings'] = s_list
    else:
        # for/else: after the loop ends, flush the final day bucket.
        if s_dict:
            day_list.append(s_dict)
    return day_list
def _getAllMinistries(date):
    """Return all ministries that are valid on the given date."""
    session = Session()
    # A ministry is valid when `date` falls within its lifetime, or when
    # it started before `date` and is still open (no end date yet).
    bounded = sql.between(date,
                          schema.groups.c.start_date,
                          schema.groups.c.end_date)
    still_open = sql.and_(
        (schema.groups.c.start_date < date),
        (schema.groups.c.end_date == None))
    return session.query(domain.Ministry).filter(
        sql.or_(bounded, still_open)).all()
def iplatlondel():
    """Yield KML Placemark deletion stubs for server hits logged
    between 8 and 2 seconds ago."""
    # If only google actually supported sessions! Darn them!
    window = between(
        m.ServerHit.insdatetime,
        datetime.datetime.now() - datetime.timedelta(seconds=8),
        datetime.datetime.now() - datetime.timedelta(seconds=2))
    for hit in m.ServerHit.query.filter(window).all():
        yield {
            'del': '<Placemark targetId="A' + str(hit.id) + '"></Placemark>'
        }
def periodoCasa(self, inicio, fin, casa):
    """Show the income receipts for a given house over a date range."""
    casa = model.Casa.get(casa)
    # Cover the full days: 00:00 on the first day up to 23:59 on the last.
    inicio = datetime(inicio.year, inicio.month, inicio.day, 0, 0)
    fin = datetime(fin.year, fin.month, fin.day, 23, 59)
    recibos = (model.Recibo.query
               .filter_by(casa=casa)
               .filter(between(model.Recibo.dia, inicio, fin))
               .all())
    return dict(recibos=recibos, inicio=inicio, fin=fin, casa=casa,
                dia=fin)
def get_sittings(self):
    """Return the public sittings between self.start_date and
    self.end_date, grouped by calendar day.

    Each element of the returned list is a dict:
        {'day': <localized full-date string>,
         'sittings': [<per-sitting summary dict>, ...]}
    """
    # Locale-aware date formatter; its output string doubles as the
    # day-grouping key below.
    formatter = self.request.locale.dates.getFormatter('date', 'full')
    session = Session()
    query = session.query(domain.GroupSitting).filter(
        sql.and_(
            # Only sittings whose workflow state is tagged 'public'.
            schema.sittings.c.status.in_(
                get_states('groupsitting', tagged=['public'])),
            sql.between(
                schema.sittings.c.start_date,
                self.start_date,
                self.end_date))).order_by(
        schema.sittings.c.start_date).options(
        eagerload('group'),
        eagerload('sitting_type'),
        eagerload('item_schedule'),
        eagerload('item_schedule.item'))
    sittings = query.all()
    day = u''
    day_list = []
    s_dict = {}
    # Sittings arrive ordered by start_date, so a change in the formatted
    # day marks a new group; the previous day's dict is flushed then.
    for sitting in sittings:
        sday = formatter.format(sitting.start_date)
        if sday != day:
            s_list = []
            day = sday
            if s_dict:
                day_list.append(s_dict)
            s_dict = {}
        # Link target depends on the owning group's type.
        if sitting.group.type == 'parliament':
            _url = url.set_url_context('/business/sittings/obj-%i' % (
                sitting.sitting_id))
        elif sitting.group.type == 'committee':
            _url = url.set_url_context(
                '/business/committees/obj-%i/sittings/obj-%i' % (
                    sitting.group.group_id, sitting.sitting_id))
        else:
            _url = '#'
        s_list.append({
            'start': sitting.start_date.strftime("%H:%M"),
            'end': sitting.end_date.strftime("%H:%M"),
            'type': sitting.group.type,
            'name': sitting.group.short_name,
            'url': _url,
            'items': self.get_sitting_items(sitting),
        })
        # Rebind every iteration; cheap, and keeps s_dict current.
        s_dict['day'] = day
        s_dict['sittings'] = s_list
    else:
        # for/else: after the loop completes, flush the final day group.
        if s_dict:
            day_list.append(s_dict)
    return day_list
def query_builder(dimension, limit=None):
    """Build a report query summing loaded ads per dimension name over
    the last week and the last month.

    :param dimension: a dimension table joinable to fact_processed_ads,
        exposing a ``name`` column.
    :param limit: optional row limit for the resulting select.
    :return: a sqlalchemy Select grouped by dimension name.
    """
    reference_date = dtt.date.today()
    last_week = reference_date - dtt.timedelta(7)
    # BUG FIX: dtt.date(year, month - 1, day) raised ValueError in
    # January (month 0) and whenever `day` did not exist in the previous
    # month (e.g. March 31 -> February 31).  Step back to the last day of
    # the previous month, then clamp the day-of-month.
    prev_month_end = reference_date.replace(day=1) - dtt.timedelta(days=1)
    last_month = prev_month_end.replace(
        day=min(reference_date.day, prev_month_end.day))
    reference_date_id = reference_date.strftime("%Y%m%d")
    last_week_id = last_week.strftime("%Y%m%d")
    last_month_id = last_month.strftime("%Y%m%d")
    return select([
        dimension.c.name,
        # Ads loaded within the last week.
        func.sum(
            func.IF(
                between(fact_processed_ads.c.date_id,
                        last_week_id, reference_date_id),
                fact_processed_ads.c.loaded_ads, 0)),
        # Ads loaded within the last month.
        func.sum(
            func.IF(
                between(fact_processed_ads.c.date_id,
                        last_month_id, reference_date_id),
                fact_processed_ads.c.loaded_ads, 0))
    ], group_by=dimension.c.name).\
        select_from(fact_processed_ads.join(dimension)).\
        limit(limit)
def render(self):
    """Render a JSON histogram [[bucket_start, mail_count], ...] of mail
    volume per `self.step`-hour bucket, newest bucket first."""
    session = rdb.Session()
    newest = session.query(Mail).order_by(desc(Mail.date)).first()
    if newest is None:
        # No mail at all: keep the fixed empty-payload shape.
        return json.dumps([[[]]])
    buckets = []
    upper = newest.date
    for i in range(self.columns):
        lower = newest.date - datetime.timedelta(hours=i * self.step)
        count = session.query(Mail).filter(
            between(Mail.date, lower, upper)).count()
        upper = lower
        buckets.append([lower, count])
    # Serialize datetimes as 'YYYY-mm-dd HH:MM:SS'; anything else -> null.
    dthandler = lambda obj: obj.strftime('%Y-%m-%d %H:%M:%S') \
        if isinstance(obj, datetime.datetime) else None
    return json.dumps([buckets], default=dthandler)
def get_unavailable_venues(start, end, sitting=None):
    """Return the booked venues whose booking overlaps [start, end].

    If ``sitting`` is given and already persisted, its own booking is
    excluded so a sitting does not block itself while being rescheduled.
    """
    # isinstance instead of an exact type() comparison: accepts
    # datetime subclasses while still rejecting dates/strings.
    assert isinstance(start, datetime.datetime)
    assert isinstance(end, datetime.datetime)
    session = Session()
    # Two intervals overlap iff an endpoint of one lies inside the other;
    # the four between() clauses below cover all overlap cases.
    b_filter = sql.or_(
        sql.between(schema.sittings.c.start_date, start, end),
        sql.between(schema.sittings.c.end_date, start, end),
        sql.between(start,
                    schema.sittings.c.start_date,
                    schema.sittings.c.end_date),
        sql.between(end,
                    schema.sittings.c.start_date,
                    schema.sittings.c.end_date))
    if sitting:
        if sitting.sitting_id:
            # Ignore the sitting's own booking.
            b_filter = sql.and_(
                b_filter,
                schema.sittings.c.sitting_id != sitting.sitting_id)
    query = session.query(BookedVenue).filter(b_filter)
    return query.all()
def group_sittings_filter(self):
    """Build the SQL filter selecting private-state group sittings that
    start within this view's date window."""
    if self.end_date:
        in_window = sql.between(
            schema.group_sittings.c.start_date,
            self.start_date,
            self.end_date)
    else:
        # Open-ended window: everything from start_date onwards.
        in_window = schema.group_sittings.c.start_date >= self.start_date
    return sql.and_(
        schema.group_sittings.c.status.in_(self._agenda_private_state_ids),
        in_window)
def filter_by_coordinate_square(self, property_ra, property_dec,
                                lowerleft, upperright):
    """Build a WHERE condition selecting rows inside a rectangular
    region of the sky.

    :param property_ra: name of the table column holding the RA coordinate
    :param property_dec: name of the table column holding the Dec coordinate
    :param lowerleft: lower-left corner, list([<RA(deg)>, <Dec(deg)>])
    :param upperright: upper-right corner, list([<RA(deg)>, <Dec(deg)>])
    :return: list() of conditions to apply to the WHERE statement
    """
    ra_clause = between(
        literal_column(str(property_ra)),
        literal_column(str(lowerleft[0])),
        literal_column(str(upperright[0])))
    dec_clause = between(
        literal_column(str(property_dec)),
        literal_column(str(lowerleft[1])),
        literal_column(str(upperright[1])))
    # Callers expect a list of conditions; both axes must match.
    return [and_(ra_clause, dec_clause)]
def sitting_filter(self):
    """Build the SQL filter selecting public-state sittings that start
    within this view's date window."""
    if self.end_date:
        in_window = sql.between(
            schema.sitting.c.start_date,
            self.start_date,
            self.end_date)
    else:
        # Open-ended window: everything from start_date onwards.
        in_window = schema.sitting.c.start_date >= self.start_date
    return sql.and_(
        schema.sitting.c.status.in_(self._agenda_public_state_ids),
        in_window)
def _getAllMinistries(date):
    """Return all ministries that are valid on the given date."""
    session = Session()
    # Valid means: `date` lies within [start_date, end_date], or the
    # ministry started before `date` and has no end date yet.
    within_lifetime = sql.between(
        date, schema.groups.c.start_date, schema.groups.c.end_date)
    open_ended = sql.and_(
        (schema.groups.c.start_date < date),
        (schema.groups.c.end_date == None))
    mfilter = sql.or_(within_lifetime, open_ended)
    return session.query(domain.Ministry).filter(mfilter).all()
def get_items(self):
    """Load the item schedules of public sittings starting in the view's
    date window, ordered by sitting start, into self.itemschedules."""
    session = Session()
    public_states = get_states("groupsitting", tagged=["public"])
    where_clause = sql.and_(
        schema.sittings.c.status.in_(public_states),
        sql.between(schema.sittings.c.start_date,
                    self.start_date, self.end_date))
    query = (session.query(domain.ItemSchedule)
             .join(domain.GroupSitting)
             .filter(where_clause)
             .order_by(schema.sittings.c.start_date)
             .options(eagerload('sitting'),
                      eagerload('item'),
                      #eagerload('sitting.sitting_type'),
                      lazyload('item.owner')))
    self.itemschedules = query.all()
def get_current_show(user=None, only_planned=False):
    """Return the user's show running right now, or None.

    When several shows qualify, a PLANNED one wins; otherwise the first
    match is returned.
    NOTE(review): uses naive datetime.utcnow() against Show.begin/end --
    presumably those columns store UTC; confirm.
    """
    running = (between(datetime.utcnow(), Show.begin, Show.end)) \
        | (Show.end == None)
    clauses = [running, UserShow.user == user]
    if only_planned:
        clauses.append(Show.flags == Show.FLAGS.PLANNED)
    shows = Show.query.join(UserShow).filter(*clauses).all()
    if not shows:
        return None
    if len(shows) == 1:
        return shows[0]
    # Several candidates: prefer the planned one, else the first.
    for show in shows:
        if show.flags & Show.FLAGS.PLANNED:
            return show
    return shows[0]
def get_current_show(user=None, only_planned=False):
    """Return the current show for ``user``, or None when nothing is on.

    A show qualifies when now (UTC) falls between its begin and end, or
    it has no end yet.  With several matches a PLANNED show is preferred.
    NOTE(review): naive datetime.utcnow() -- columns presumably UTC;
    confirm against the schema.
    """
    now_running = (between(datetime.utcnow(), Show.begin, Show.end)) \
        | (Show.end == None)
    filters = [now_running, UserShow.user == user]
    if only_planned:
        filters.append(Show.flags == Show.FLAGS.PLANNED)
    matches = Show.query.join(UserShow).filter(*filters).all()
    if len(matches) == 1:
        return matches[0]
    elif len(matches) > 1:
        planned = [s for s in matches if s.flags & Show.FLAGS.PLANNED]
        return planned[0] if planned else matches[0]
    else:
        return None
def get_items(self):
    """Load the item schedules of all non-draft sittings in the view's
    date window, ordered by sitting start, into self.itemschedules."""
    session = Session()
    # Everything except the draft-agenda workflow state.
    not_draft = schema.sittings.c.status != \
        sitting_wf_state[u'draft-agenda'].id
    in_window = sql.between(schema.sittings.c.start_date,
                            self.start_date, self.end_date)
    query = (session.query(domain.ItemSchedule)
             .join(domain.GroupSitting)
             .filter(sql.and_(not_draft, in_window))
             .order_by(schema.sittings.c.start_date)
             .options(eagerload('sitting'),
                      eagerload('item'),
                      eagerload('sitting.sitting_type'),
                      lazyload('item.owner')))
    self.itemschedules = query.all()
def current_sittings_query(self, date):
    """Build a query for this parliament's sittings in the month of
    ``date``, clamped to the parliamentary session's own lifetime.

    :param date: a date inside the month being displayed.
    :return: a GroupSitting query ordered by start date.
    """
    session = removeSecurityProxy(self.context)
    group_id = session.parliament_id
    start_date = session.start_date
    # BUG FIX: the original compared bare .month values, which breaks
    # across year boundaries (e.g. a session starting Nov 2009 was not
    # clamped when displaying Mar 2010).  Compare (year, month) instead.
    if (start_date.year, start_date.month) < (date.year, date.month):
        start_date = datetime.date(date.year, date.month, 1)
    end_date = session.end_date
    if end_date:
        if (end_date.year, end_date.month) > (date.year, date.month):
            # Session outlives the month: clamp to the month's last day.
            end_date = date + relativedelta.relativedelta(day=31)
    else:
        # Open-ended session: bound the query at the month's last day.
        end_date = date + relativedelta.relativedelta(day=31)
    s_filter = sql.and_(
        domain.GroupSitting.group_id == group_id,
        sql.between(domain.GroupSitting.start_date, start_date, end_date)
    )
    return Session().query(domain.GroupSitting).filter(s_filter).order_by(
        domain.GroupSitting.start_date)
def current_sittings_query(self, date):
    """Build a query for this parliament's sittings in the month of
    ``date``, clamped to the parliamentary session's own lifetime.

    :param date: a date inside the month being displayed.
    :return: a GroupSitting query ordered by start date.
    """
    session = removeSecurityProxy(self.context)
    group_id = session.parliament_id
    start_date = session.start_date
    # BUG FIX: comparing bare .month values breaks across year
    # boundaries; compare (year, month) tuples instead.
    if (start_date.year, start_date.month) < (date.year, date.month):
        start_date = datetime.date(date.year, date.month, 1)
    end_date = session.end_date
    if end_date:
        if (end_date.year, end_date.month) > (date.year, date.month):
            # Session outlives the month: clamp to the month's last day.
            end_date = date + relativedelta.relativedelta(day=31)
    else:
        # Open-ended session: bound the query at the month's last day.
        end_date = date + relativedelta.relativedelta(day=31)
    s_filter = sql.and_(
        domain.GroupSitting.group_id == group_id,
        sql.between(domain.GroupSitting.start_date, start_date, end_date))
    return Session().query(domain.GroupSitting).filter(s_filter).order_by(
        domain.GroupSitting.start_date)
def get_items(self):
    """Load the item schedules of public sittings starting between
    self.start_date and self.end_date into self.itemschedules."""
    session = Session()
    is_public = schema.sittings.c.status.in_(
        get_states("groupsitting", tagged=["public"]))
    starts_in_window = sql.between(
        schema.sittings.c.start_date, self.start_date, self.end_date)
    query = session.query(domain.ItemSchedule) \
        .join(domain.GroupSitting) \
        .filter(sql.and_(is_public, starts_in_window)) \
        .order_by(schema.sittings.c.start_date) \
        .options(
            eagerload('sitting'),
            eagerload('item'),
            #eagerload('sitting.sitting_type'),
            lazyload('item.owner'))
    self.itemschedules = query.all()
def top_servers_by_players_q(cutoff_days):
    """
    Query to get the top servers by the amount of players active
    during a date range. Games older than cutoff_days days old
    are ignored.
    """
    # Only games played inside [now - cutoff_days, now] are considered.
    right_now = datetime.utcnow()
    cutoff_dt = right_now - timedelta(days=cutoff_days)
    q = DBSession.query(Server.server_id, Server.name, func.count())
    q = q.filter(Game.server_id == Server.server_id)
    q = q.filter(expr.between(Game.create_dt, cutoff_dt, right_now))
    q = q.order_by(expr.desc(func.count(Game.game_id)))
    q = q.group_by(Server.server_id)
    q = q.group_by(Server.name)
    return q
def top_maps_by_times_played_q(cutoff_days):
    """
    Query to retrieve the top maps by the amount of times it was played
    during a date range. Games older than cutoff_days days old are
    ignored.
    """
    # Only games played inside [now - cutoff_days, now] are considered.
    right_now = datetime.utcnow()
    cutoff_dt = right_now - timedelta(days=cutoff_days)
    q = DBSession.query(Game.map_id, Map.name, func.count())
    q = q.filter(Map.map_id == Game.map_id)
    q = q.filter(expr.between(Game.create_dt, cutoff_dt, right_now))
    q = q.order_by(expr.desc(func.count()))
    q = q.group_by(Game.map_id)
    q = q.group_by(Map.name)
    return q
def top_players_by_time_q(cutoff_days):
    """
    Query for the top players by the amount of time played during a
    date range. Games older than cutoff_days days old are ignored.
    """
    # Only games played inside [now - cutoff_days, now] are considered.
    right_now = datetime.utcnow()
    cutoff_dt = right_now - timedelta(days=cutoff_days)
    q = DBSession.query(Player.player_id, Player.nick,
                        func.sum(PlayerGameStat.alivetime))
    q = q.filter(Player.player_id == PlayerGameStat.player_id)
    # Skip the reserved low player ids.
    q = q.filter(Player.player_id > 2)
    q = q.filter(expr.between(PlayerGameStat.create_dt, cutoff_dt, right_now))
    q = q.order_by(expr.desc(func.sum(PlayerGameStat.alivetime)))
    q = q.group_by(Player.nick)
    q = q.group_by(Player.player_id)
    return q
def _range_predicate(attribute, val_range): """ Accepts an attribute and a tuple (min, max), and returns a predicate to find items whose attribute values fall within that range. The range includes the endpoints. This is a private helper function used to avoid cluttering get_recipes(). """ if not hasattr(val_range, '__iter__'): return attribute == val_range else: if len(val_range) != 2: raise ValueError( "Invalid range %s; valid ranges are (min, max) tuples." % str(val_range)) (min_val, max_val) = val_range if min_val != None and max_val != None: return between(attribute, min_val, max_val) elif min_val != None: return attribute >= min_val else: return attribute <= max_val