def handleSchedule(object, event):
    """Move scheduled items between workflow states when a sitting's
    agenda status changes.

    When the sitting goes (back) to "draft_agenda", items return to a
    "tobescheduled"-tagged state; when it is "published_agenda", items
    move to a "scheduled"-tagged state.  The two branches of the
    original were identical except for the tag, so they are unified.

    :param object: (security-proxied) object carrying group_sitting_id.
    :param event: the triggering event (unused, required by the handler
        signature).
    """
    session = Session()
    s = removeSecurityProxy(object)
    sitting = session.query(domain.GroupSitting
        ).options(eagerload("group_sitting_type"), eagerload("item_schedule")
        ).get(s.group_sitting_id)
    schedulings = map(removeSecurityProxy, sitting.item_schedule)
    # Map sitting status -> workflow tag of the target state for items.
    status_tags = {
        "draft_agenda": "tobescheduled",
        "published_agenda": "scheduled",
    }
    tag = status_tags.get(sitting.status)
    if tag is None:
        # Any other status: nothing to reschedule (same as original).
        return
    for sch in schedulings:
        if sch.item.type == "heading":
            continue  # headings carry no workflow
        wfc = IWorkflowController(sch.item)
        wf = wfc.workflow
        next_state = get_states(sch.item.type, tagged=[tag])
        for transition_id in wfc.getSystemTransitionIds():
            t = wf.get_transition(transition_id)
            if t.destination in next_state:
                # TODO find out why fireTransition fails for reschedule
                # even when the user has requisite permissions
                wfc.fireTransition(transition_id, check_security=False)
                break
def dump_csv(self, id):
    """Export balance changes as a CSV attachment.

    :param id: a balance uid, or the literal string ``"all"`` to export
        every balance of the authenticated user.
    :returns: the CSV document as a string (response headers are set as
        a side effect).
    """
    dbFacade = self.dbFacade()
    if "all" == id:
        # All balances owned by the current user.
        balance_uids = [userBalance.balance_uid
                        for userBalance in h.authenticated_user().balances]
    else:
        c.balance = dbFacade.balances.balanceDao.find_by_uid(id)
        if not c.balance:
            abort(404)
        # Exporting requires modify rights on the balance.
        if not c.balance.can_modify_balance(h.authenticated_user().uid):
            abort(403)
        balance_uids = [c.balance.uid]
    # cp1250 default encoding — presumably for legacy central-European
    # spreadsheet imports; TODO confirm with consumers.
    charset = request.params.get('charset', 'cp1250')
    changes = dbFacade.db.query(dbFacade.model.BalanceChange) \
        .options(eagerload('change_category'), eagerload('tags')) \
        .join(dbFacade.model.BalanceChange.change_category) \
        .filter(or_(*[dbFacade.model.BalanceChange.balance.has(
            dbFacade.model.Balance.uid == uid) for uid in balance_uids])) \
        .order_by(dbFacade.model.BalanceChange.occurred_on,
                  dbFacade.model.BalanceChange.uid).all()
    # str() guards against unicode header values, which some WSGI
    # servers reject.
    response.headers['Content-Type'] = str('text/csv; charset=%s' % charset)
    response.headers['Content-Disposition'] = str(
        "attachment; filename = %s.csv" % self.filename(
            (hasattr(c, 'balance') and c.balance.name or "all")))
    output = StringIO()
    csv = UnicodeWriter(output, encoding=charset)
    csv.writerows([
        [unicode(change.occurred_on), unicode(change.amount),
         change.change_category.name, change.description,
         change.tags_as_string()]
        for change in changes])
    return output.getvalue()
def get_by_users(self, users):
    """Return vote-idea rows belonging to any of *users*, with the
    related idea and user eagerly loaded."""
    uids = [user.uid for user in users]
    query = (VoteIdeaData.query
             .filter(VoteIdeaData.user_uid.in_(uids))
             .options(eagerload(VoteIdeaData.idea),
                      eagerload(VoteIdeaData.user)))
    return self._get_by_query(query)
def test_weakref_with_cycles_o2o(self):
    """Identity-map entries for a one-to-one backref cycle must be weakly
    referenced: clean instances are garbage-collectable, dirty instances
    stay pinned until the session is committed."""
    s = sessionmaker()()
    mapper(User, users, properties={
        "address": relation(Address, backref="user", uselist=False)
    })
    mapper(Address, addresses)
    s.add(User(name="ed", address=Address(email_address="ed1")))
    s.commit()
    user = s.query(User).options(eagerload(User.address)).one()
    user.address.user  # touch the backref to create the reference cycle
    eq_(user, User(name="ed", address=Address(email_address="ed1")))
    del user
    gc.collect()
    # Clean objects, even in a cycle, must be reclaimed from the map.
    assert len(s.identity_map) == 0
    user = s.query(User).options(eagerload(User.address)).one()
    user.address.email_address = 'ed2'
    user.address.user  # lazyload
    del user
    gc.collect()
    # Dirty objects are strongly referenced until flushed/committed.
    assert len(s.identity_map) == 2
    s.commit()
    user = s.query(User).options(eagerload(User.address)).one()
    eq_(user, User(name="ed", address=Address(email_address="ed2")))
def get_game(cls, game):
    """Return all polymorphic rows for *game*, eagerly loading the
    related player and team."""
    query = DBSession.query(cls)
    query = query.options(eagerload('player'), eagerload('team'))
    query = query.with_polymorphic('*')
    return query.filter(cls.game_id == game.id).all()
def _topic_feed(request, title, query, order_by):
    """Render a topic query as an Atom feed response.

    Non-moderators never see deleted posts; moderators see them, marked
    with a "(deleted)" suffix so they can be told apart.
    """
    is_moderator = request.user and request.user.is_moderator
    if not is_moderator:
        query = query.filter_by(is_deleted=False)
    requested = request.args.get('num', 10, type=int)
    entry_cap = max(0, min(50, requested))  # clamp to 0..50 entries
    query = (query.order_by(_topic_order[order_by])
                  .options(eagerload('author'), eagerload('question'))
                  .limit(entry_cap))
    feed = AtomFeed(u'%s — %s' % (title, settings.WEBSITE_TITLE),
                    subtitle=settings.WEBSITE_TAGLINE,
                    feed_url=request.url, url=request.url_root)
    for topic in query.all():
        entry_title = topic.title
        if topic.is_deleted:
            entry_title += u' ' + _(u'(deleted)')
        feed.add(entry_title, topic.question.rendered_text,
                 content_type='html',
                 author=topic.author.display_name,
                 url=url_for(topic, _external=True),
                 id=topic.guid,
                 updated=topic.last_change,
                 published=topic.date)
    return feed.get_response()
def schedule(self, schedule_id=None, eager=True):
    """ Get educator's full week schedule.

    :param schedule_id: Optional schedule's id to work on.
    :type schedule_id: :class:`int`

    :param eager: Whether or not to eagerly load lesson's group and
                  group's year.
    :type eager: :class:`bool`
    """
    q = Lesson.query_current(schedule_id)
    q = q.filter(Lesson.teacher_id == self.id)
    if eager:
        q = q.options(eagerload('group'), eagerload('group.year'))
    # Bucket lessons by weekday (0-4).
    days = dict((x, []) for x in range(5))
    for lesson in q.all():
        days[lesson.day].append(lesson)
    # FIX: the original iterated days.values(), whose ordering is a
    # dict-implementation detail; iterate weekdays explicitly so the
    # returned schedule is always Monday..Friday.
    schedule = []
    for x in range(5):
        schedule.append(self._process_schedule(days[x]))
    return schedule
def lesson(self, day, order, schedule_id=None, eager=True):
    """Return the scheduled lesson(s) of this teacher for a given day
    and lesson order.

    :param day: The day (int).
    :param order: The lesson order (int).
    :param schedule_id: Optional schedule id to work on.
    :param eager: Eagerly load the lesson's group and the group's year.
    """
    query = Lesson.query_current(schedule_id)
    query = (query
             .filter(Lesson.day == day)
             .filter(Lesson.order == order)
             .filter(Lesson.teacher_id == self.id))
    if eager:
        query = query.options(eagerload('group'), eagerload('group.year'))
    return query.all()
def get_game(cls, game_id):
    """Fetch one game by primary key with both teams, both score lists
    and the officials eagerly loaded."""
    load_options = [
        eagerload('away_team'),
        eagerload('home_team'),
        eagerload('home_scores'),
        eagerload('away_scores'),
        eagerload('officials'),
    ]
    return DBSession.query(cls).options(*load_options).get(game_id)
def dnps(cls, game):
    """Query the players who did not play (DNP) in *game*, eagerly
    loading player, positions, team and league."""
    load_options = [
        eagerload('player'),
        eagerload('player.positions'),
        eagerload('team'),
        eagerload('team.league'),
    ]
    query = DBSession.query(cls).options(*load_options)
    query = query.join(GamePlayerDNP)
    return query.filter(cls.game == game)
def current_players(cls, league):
    """Query the current roster players in *league*, ordered by their
    full name, with physicals and college eagerly loaded."""
    query = DBSession.query(cls).join(Person, PersonName)
    query = query.options(eagerload('person.height_weight'),
                          eagerload('person.college'))
    query = query.join(TeamPlayer)
    query = query.filter(TeamPlayer.current == True,
                         Player.league == league)
    return query.order_by(PersonName.full_name)
def list(self, balance_uid):
    """Return a JSON-ready dict of balance changes for *balance_uid*,
    optionally filtered by a date range and paginated.

    Returns ``{"failure": ...}`` when the balance does not exist or the
    authenticated user may not see it.
    """
    model = request.environ['sqlalchemy.model']
    db = request.environ['sqlalchemy.session']
    balance = db.query(model.Balance).filter_by(uid = balance_uid).first()
    if not balance or not balance.can_see_balance(h.authenticated_user().uid):
        return { "failure": Messages.permissionDenied() }
    qr = balance.changes \
        .options(eagerload('expense_category'), eagerload('income_category'),
                 eagerload('tags')) \
        .order_by(model.BalanceChange.occurred_on)
    (start_date, end_date) = self._get_dates()
    if start_date:
        qr = qr.filter(model.BalanceChange.occurred_on >= start_date)
    if end_date:
        qr = qr.filter(model.BalanceChange.occurred_on <= end_date)
    balance_changes = qr.all()
    total = len(balance_changes)
    # FIX: the bare `except:` clauses also swallowed SystemExit and
    # KeyboardInterrupt; catch only the expected failures (parameter
    # missing or not an integer).
    try:
        page_nr = int(request.params['page_nr'])
    except (KeyError, ValueError, TypeError):
        page_nr = 1
    try:
        items_per_page = int(request.params['items_per_page'])
    except (KeyError, ValueError, TypeError):
        items_per_page = 15
    subset = Page(balance_changes, item_count=total,
                  current_page=page_nr, items_per_page=items_per_page)
    return {
        "summary": {
            "total": self._total(balance_uid),
            "date_range": {
                "expenses": self._expenses_for_date_range(
                    balance_uid, start_date, end_date),
                "incomes": self._incomes_for_date_range(
                    balance_uid, start_date, end_date),
            },
        },
        "changes": {
            "totalItems": total,
            "itemsFound": len(subset),
            "items": [{
                "uid": item.uid,
                "category_uid": item.is_income and item.income_category_uid
                                or item.expense_category_uid,
                "category": (item.is_income and item.income_category
                             or item.expense_category).name,
                "amount": Decimal(item.amount),
                "description": item.description,
                "occurred_on": str(item.occurred_on),
                "is_income": item.is_income,
                "tags_as_string": item.tags_as_string()}
                for item in subset
            ]
        }
    }
def rankings(self):
    """Attach the fantasy-rankings query (players and positions eagerly
    loaded) to the view data and return it."""
    session = DBSession()
    query = session.query(FantasyRanking).options(
        eagerload('rankings'),
        eagerload('rankings.player'),
        eagerload('rankings.player.positions'))
    self.data.update({'rankings': query})
    return self.data
def player_stats(cls, sport, league, game_type, player):
    """Query the non-offense stat rows for *player* in games of
    *game_type*.

    ``sport`` and ``league`` are accepted for signature compatibility
    but do not participate in the filter.
    """
    query = DBSession.query(cls)
    query = query.options(eagerload('game'), eagerload('game.season'))
    query = query.with_polymorphic('*')
    query = query.join(Game)
    return query.filter(cls.player == player,
                        cls.stat_type != 'offense',
                        Game.game_type == game_type)
def get_sittings(self):
    """Group the public sittings between self.start_date and
    self.end_date into a list of {day, sittings} dicts, one per
    calendar day, ordered by start date."""
    formatter = self.request.locale.dates.getFormatter("date", "full")
    session = Session()
    query = (
        session.query(domain.GroupSitting)
        .filter(
            sql.and_(
                schema.sittings.c.status.in_(
                    get_states("groupsitting", tagged=["public"])),
                sql.between(schema.sittings.c.start_date,
                            self.start_date, self.end_date),
            )
        )
        .order_by(schema.sittings.c.start_date)
        .options(
            eagerload("group"),
            # eagerload('sitting_type'),
            eagerload("item_schedule"),
            eagerload("item_schedule.item"),
        )
    )
    sittings = query.all()
    day = u""
    day_list = []
    s_dict = {}
    for sitting in sittings:
        sday = formatter.format(sitting.start_date)
        if sday != day:
            # New calendar day: start a fresh bucket and flush the
            # previous one (if any) into the result list.
            s_list = []
            day = sday
            if s_dict:
                day_list.append(s_dict)
            s_dict = {}
        # The link target depends on the owning group's type; unknown
        # types get a dead link.
        if sitting.group.type == "parliament":
            _url = url.set_url_context(
                "/business/sittings/obj-%i" % (sitting.sitting_id))
        elif sitting.group.type == "committee":
            _url = url.set_url_context(
                "/business/committees/obj-%i/sittings/obj-%i"
                % (sitting.group.group_id, sitting.sitting_id)
            )
        else:
            _url = "#"
        s_list.append(
            {
                "start": sitting.start_date.strftime("%H:%M"),
                "end": sitting.end_date.strftime("%H:%M"),
                "type": sitting.group.type,
                "name": sitting.group.short_name,
                "url": _url,
                "items": self.get_sitting_items(sitting),
            }
        )
        s_dict["day"] = day
        s_dict["sittings"] = s_list
    else:
        # for/else: runs after the loop finishes; flush the final
        # day's bucket.
        if s_dict:
            day_list.append(s_dict)
    return day_list
def team_schedule(cls, team, game_type, year):
    """Query all games of *team* in the season *year* for *game_type*,
    home or away, ordered by game time."""
    home_or_away = or_(cls.home_team == team, cls.away_team == team)
    query = DBSession.query(cls).options(
        eagerload('away_team'),
        eagerload('home_team'),
        eagerload('home_scores'),
        eagerload('away_scores'),
    )
    query = query.filter(home_or_away,
                         cls.game_type == game_type,
                         cls.season.has(year=year))
    return query.order_by(Game.game_time)
def _subcontainer_id_map(self, id_list):
    """Return an id to model map of all subcontainer-type models in the id_list.

    :param id_list: ids of subcontainer models to fetch.
    :returns: dict mapping id -> model instance (empty dict when
        *id_list* is empty).
    """
    if not id_list:
        # FIX: the original returned a list here, an inconsistent type
        # for a function documented to return a map.
        return {}
    component_class = self.subcontainer_class
    query = (self._session().query(component_class)
             .filter(component_class.id.in_(id_list))
             .options(eagerload('collection'))
             .options(eagerload('tags'))
             .options(eagerload('annotations')))
    return dict((row.id, row) for row in query.all())
def get_sittings(self):
    """Group the public sittings between self.start_date and
    self.end_date into a list of {day, sittings} dicts, one per
    calendar day, ordered by start date."""
    formatter = self.request.locale.dates.getFormatter('date', 'full')
    session = Session()
    query = session.query(domain.GroupSitting).filter(
        sql.and_(
            schema.group_sittings.c.status.in_(
                get_states('groupsitting', tagged=['public'])
            ),
            sql.between(
                schema.group_sittings.c.start_date,
                self.start_date, self.end_date))).order_by(
        schema.group_sittings.c.start_date).options(
            eagerload('group'),
            #eagerload('sitting_type'),
            eagerload('item_schedule'),
            eagerload('item_schedule.item')
        )
    sittings = query.all()
    day = u''
    day_list = []
    s_dict = {}
    for sitting in sittings:
        sday = formatter.format(sitting.start_date)
        if sday != day:
            # New calendar day: start a fresh bucket; flush the
            # previous one (if any) into the result list.
            s_list = []
            day = sday
            if s_dict:
                day_list.append(s_dict)
            s_dict = {}
        # Link target depends on the owning group's type; unknown
        # types get a dead link.
        if sitting.group.type == 'parliament':
            _url = url.set_url_context('/business/sittings/obj-%i' % (
                sitting.group_sitting_id))
        elif sitting.group.type == 'committee':
            _url = url.set_url_context(
                '/business/committees/obj-%i/sittings/obj-%i'
                % (sitting.group.group_id, sitting.group_sitting_id))
        else:
            _url = '#'
        s_list.append({
            'start': sitting.start_date.strftime("%H:%M"),
            'end': sitting.end_date.strftime("%H:%M"),
            'type': sitting.group.type,
            'name': sitting.group.short_name,
            'url': _url,
            'items': self.get_sitting_items(sitting),
        })
        s_dict['day'] = day
        s_dict['sittings'] = s_list
    else:
        # for/else: runs after the loop finishes; flush the final
        # day's bucket.
        if s_dict:
            day_list.append(s_dict)
    return day_list
def get_date(cls, league, date):
    """Return the games in *league* starting within 24 hours of *date*,
    ordered by game time, with teams, scores and league eagerly loaded."""
    one_day = datetime.timedelta(days=1)
    query = DBSession.query(cls).options(
        eagerload('home_scores'),
        eagerload('away_scores'),
        eagerload('home_team'),
        eagerload('away_team'),
        eagerload('league'))
    query = query.filter(cls.league == league,
                         cls.game_time.between(date, date + one_day))
    return query.order_by(cls.game_time).all()
def _contained_id_map(self, id_list):
    """Return an id to model map of all contained-type models in the id_list.

    :param id_list: ids of contained models to fetch.
    :returns: dict mapping id -> model instance (empty dict when
        *id_list* is empty).
    """
    if not id_list:
        # FIX: the original returned a list here, an inconsistent type
        # for a function documented to return a map.
        return {}
    component_class = self.contained_class
    query = (self._session().query(component_class)
             .filter(component_class.id.in_(id_list))
             .options(undefer('_metadata'))
             .options(eagerload('dataset.actions'))
             .options(eagerload('tags'))
             .options(eagerload('annotations')))
    return dict((row.id, row) for row in query.all())
def get_sittings(self):
    """Group sittings matching self.group_sittings_filter into a list
    of {day, sittings} dicts, one per calendar day; when no end date is
    set, the listing is capped by site configuration."""
    #!+QUERIES(mb, nov-2011) to review the extra queries in `get_items`
    formatter = self.request.locale.dates.getFormatter("date", "full")
    session = Session()
    query = session.query(domain.GroupSitting).filter(
        self.group_sittings_filter
    ).order_by(schema.group_sittings.c.start_date).options(
        eagerload("group"),
        #eagerload("sitting_type"),
        eagerload("item_schedule")
    )
    if not self.end_date:
        # Open-ended listing: cap the number of sittings shown.
        query = query.limit(
            BungeniSettings(
                common.get_application()
            ).max_sittings_in_business
        )
    sittings = query.all()
    day = u""
    day_list = []
    s_dict = {}
    for sitting in sittings:
        sday = formatter.format(sitting.start_date)
        if sday != day:
            # New calendar day: start a fresh bucket; flush the
            # previous one (if any) into the result list.
            s_list = []
            day = sday
            if s_dict:
                day_list.append(s_dict)
            s_dict = {}
        # Link target depends on the owning group's type; unknown
        # types get a dead link.
        if sitting.group.type == "parliament":
            _url = url.set_url_context("/business/sittings/obj-%i" % (
                sitting.group_sitting_id))
        elif sitting.group.type == "committee":
            _url = url.set_url_context(
                "/business/committees/obj-%i/sittings/obj-%i"
                % (sitting.group.group_id, sitting.group_sitting_id))
        else:
            _url = "#"
        s_list.append({
            "start": sitting.start_date.strftime("%H:%M"),
            "end": sitting.end_date.strftime("%H:%M"),
            "type": sitting.group.type,
            "name": sitting.group.short_name,
            "url": _url,
            "items": self.get_sitting_items(sitting),
        })
        s_dict["day"] = day
        s_dict["sittings"] = s_list
    else:
        # for/else: runs after the loop finishes; flush the final
        # day's bucket.
        if s_dict:
            day_list.append(s_dict)
    return day_list
def export_to_cdn(self, date_from, date_to):
    """Export invoices to OPTIMA.

    Builds a CDN OPTIMA offline XML document containing all invoices and
    corrections issued within [date_from, date_to], marks exported
    invoices as exported, and returns a StringIO buffer with the XML.
    """
    doc = Document()
    from pytis.model import meta
    buffer = StringIO.StringIO()
    invoices = (Invoice.query.options(eagerload('elements'))
                .filter(Invoice.issueDate.between(date_from, date_to))
                .order_by(Invoice.series_number).all())
    corrects = (InvoiceCorrect.query.options(eagerload('positions'))
                .filter(InvoiceCorrect.correct_date.between(date_from, date_to))
                .order_by(InvoiceCorrect.series_number).all())
    root = doc.createElement('ROOT')
    root.setAttribute('xmlns', 'http://www.cdn.com.pl/optima/offline')
    doc.appendChild(root)
    companiesElement = doc.createElement('KONTRAHENCI')
    companiesElement.appendChild(self._add_element(doc, 'WERSJA', '2.00'))
    companiesElement.appendChild(self._add_element(doc, 'BAZA_ZRD_ID', 'SPRZ'))
    companiesElement.appendChild(self._add_element(doc, 'BAZA_DOC_ID', 'SPRZ'))
    root.appendChild(companiesElement)
    # Collect the distinct companies referenced by invoices and corrections.
    companies = []
    for invoice in invoices:
        if invoice.company not in companies:
            companies.append(invoice.company)
    for correct in corrects:
        if correct.company not in companies:
            companies.append(correct.company)
    for company in companies:
        companiesElement.appendChild(self._add_company(doc, company))
    # FIX: the original called root.appendChild(companiesElement) a second
    # time here; re-appending an existing child is a redundant no-op move,
    # so the duplicate call is removed.
    invoicesElement = doc.createElement('REJESTRY_SPRZEDAZY_VAT')
    invoicesElement.appendChild(self._add_element(doc, 'WERSJA', '2.00'))
    invoicesElement.appendChild(self._add_element(doc, 'BAZA_ZRD_ID', 'SPRZ'))
    invoicesElement.appendChild(self._add_element(doc, 'BAZA_DOC_ID', 'SPRZ'))
    for invoice in invoices:
        invoicesElement.appendChild(self._add_invoice(doc, invoice))
        if not invoice.is_exported:
            invoice.mark_as_exported()
            # NOTE: committing per invoice keeps the exported flag durable
            # even if a later invoice fails mid-export.
            meta.Session.commit()
    for correct in corrects:
        invoicesElement.appendChild(self._add_correct(doc, correct))
    root.appendChild(invoicesElement)
    buffer.write(doc.toprettyxml(indent='', newl=''))
    return buffer
def crops(self, pick=None):
    """Compute crop-ratio statistics per lens.

    For every image with crop and file dimensions, the ratio of cropped
    to original (height + width) is paired with the lens name; the
    pairs are handed to ``stats``.
    """
    query = (self.query(Image)
             .options(eagerload("develop"), eagerload("exif"))
             .join(Develop)
             .join(Exif))
    if pick is not None:
        query = query.filter(Image.pick == pick)
    query = (query
             .filter(Develop.croppedHeight != None)
             .filter(Develop.croppedWidth != None)
             .filter(Exif.lens != None))
    pairs = list()
    for img in query.all():
        if img.develop.fileHeight is None:
            continue  # cannot compute a ratio without the file size
        ratio = (img.develop.croppedHeight + img.develop.croppedWidth) / (
            img.develop.fileHeight + img.develop.fileWidth)
        pairs.append((img.exif.lens, Decimal("%0.2f" % (ratio))))
    return stats(pairs)
def update(self):
    """Refresh the query: sittings whose status is in self.states,
    newest first, with group and sitting type eagerly loaded."""
    session = Session()
    status_filter = domain.GroupSitting.status.in_(self.states)
    self.query = (session.query(domain.GroupSitting)
                  .filter(status_filter)
                  .order_by(domain.GroupSitting.start_date.desc())
                  .options(eagerload('group'), eagerload('sitting_type')))
def __init__(self, g, columns):
    """Build the PokemonForm table model, eagerly loading the pokemon,
    its species and all related forms."""
    mappedClass = tables.PokemonForm
    query = (g.session.query(mappedClass)
             .join(tables.PokemonForm.pokemon)
             .join(tables.Pokemon.species))
    for relation_path in ('pokemon', 'pokemon.forms',
                          'pokemon.species', 'pokemon.species.forms'):
        query = query.options(eagerload(relation_path))
    BaseQueryModel.__init__(self, g, mappedClass, query, columns,
                            defaultSortClause=DefaultPokemonSortClause())
    self.tableName = 'PokemonForm'
    self._hack_small_icons = False
def check_calendar_tasks(self, conn):
    """Submit the tasks whose calendar entries match the current UTC
    time, skipping a task whose previous run has not finished.

    Each calendar field (dow/month/day/hour/minute) is a bitmask; 0
    means "any", otherwise the bit for the current time component must
    be set.
    """
    now = list(time.gmtime())
    tasks = []
    db_type = get_dbtype()
    if db_type == constants.MYSQL:
        # FIX: the original MySQL branch was missing parentheses around
        # each bitwise test: `(a == 0) | a.op('&')(mask) > 0` parses as
        # `((a == 0) | (a & mask)) > 0` because `|` binds tighter than
        # `>`, producing the wrong SQL.  The Oracle branch below was
        # already parenthesized correctly.
        tasks = (conn.query(TaskCalendar).options(eagerload('task'))
                 .filter((TaskCalendar.dow == 0) |
                         (TaskCalendar.dow.op('&')(1 << now[6]) > 0))
                 .filter((TaskCalendar.month == 0) |
                         (TaskCalendar.month.op('&')(1 << now[1]) > 0))
                 .filter((TaskCalendar.day == 0) |
                         (TaskCalendar.day.op('&')(1 << now[2]) > 0))
                 .filter((TaskCalendar.hour == 0) |
                         (TaskCalendar.hour.op('&')(1 << now[3]) > 0))
                 .filter((TaskCalendar.minute == 0) |
                         (TaskCalendar.minute.op('&')(1 << now[4]) > 0)))
    if db_type == constants.ORACLE:
        # Oracle has no `&` operator; use the BITAND function instead.
        tasks = (conn.query(TaskCalendar).options(eagerload('task'))
                 .filter((TaskCalendar.dow == 0) |
                         (func.BITAND(TaskCalendar.dow, 1 << now[6]) > 0))
                 .filter((TaskCalendar.month == 0) |
                         (func.BITAND(TaskCalendar.month, 1 << now[1]) > 0))
                 .filter((TaskCalendar.day == 0) |
                         (func.BITAND(TaskCalendar.day, 1 << now[2]) > 0))
                 .filter((TaskCalendar.hour == 0) |
                         (func.BITAND(TaskCalendar.hour, 1 << now[3]) > 0))
                 .filter((TaskCalendar.minute == 0) |
                         (func.BITAND(TaskCalendar.minute, 1 << now[4]) > 0)))
    for cal in tasks:
        task = cal.task
        if self.check_prior_execution(conn, task):
            self.submit_async(task)
        else:
            logger.warning('The prior execution of task %s has not completed. Delaying the current execution...' % task.name)
def shots(self):
    """Build the JSON shot-chart payload for self.team and attach it to
    self.data under 'json_shots'."""
    session = DBSession()
    # NOTE(review): `page` appears unused within this method.
    page = 'shots'
    team = self.team
    shot_cords = session.query(ShotCoordinates)\
        .join(Shot)\
        .options(eagerload('shot.team'),
                 eagerload('shot.game'),
                 eagerload('shot.game.home_team'),
                 eagerload('shot.game.away_team'),
                 eagerload('shot.shooter'))\
        .filter(Shot.team==team)\
        .all()
    json_shots = []
    for shot_cord in shot_cords:
        shot = shot_cord.shot
        game = shot.game
        # Determine the opponent from which side of the game `team`
        # played on.
        if game.away_team == team:
            opponent = game.home_team
        elif game.home_team == team:
            opponent = game.away_team
        else:
            # Data-integrity failure: the shot's game does not involve
            # this team at all.
            print 'home', game.home_team
            print 'away', game.away_team
            print 'player team', team
            raise Exception("team doesn't exist")
        shot_info = {'shot_id': shot.id,
                     'shooter_name': shot.shooter.person.names[0].full_name,
                     'shooter_id': shot.shooter_id,
                     'period': shot.period,
                     'time_of_game': shot.time_of_game.strftime('%H:%M'),
                     'home_score': shot.home_score,
                     'away_score': shot.away_score,
                     'x': shot_cord.x,
                     'y': shot_cord.y,
                     'length': shot_cord.length,
                     'make': shot.make,
                     'shot_type': shot.shot_type,
                     'game_id': game.id,
                     'opponent_name': opponent.name,
                     'opponent_id': opponent.id,
                     }
        json_shots.append(shot_info)
    json_shots = json.dumps(json_shots)
    self.data.update({
        'json_shots': json_shots
    })
    return self.data
def almanacs_kml(self): json = request.params.get('extent') # We need to make sure we only select almanacs with pages here, query = meta.Session.query(Almanac).join(Almanac.pages).distinct() # ... and eager-load the pages since the ktml template uses them. query = query.options(eagerload(Almanac.pages)) # Tried also with contains_eager, not sure what the difference is # but I only get a fraction of the expected almanacs: #query = meta.Session.query(Almanac).join(Almanac.pages).distinct() #query = query.options(contains_eager(Almanac.pages)) # Also tried using a single, second query for the pages. # ... NOPE, requires sqlalchemy > 0.6.0 which blows up on us, # maybe not compatible w/ pylons 0.9.7? #query = meta.Session.query(Almanac).join(Almanac.pages).distinct() #query = query.options(subqueryload(Almanac.pages)) # Tried also without the explicit join().distinct(), this gives # back all almanac whether they have any pages or not: #query = meta.Session.query(Almanac).options(eagerload(Almanac.pages)) if json is not None: shape = simplejson.loads(json) # Stupid asShape returns an Adapter instead of a Geometry. We round # trip it through wkb to get the correct type. bbox = wkb.loads(asShape(shape).to_wkb()) query = query.filter(func.st_intersects(Almanac.location, func.st_transform('SRID=%s;%s' % ('4326', b2a_hex(bbox.to_wkb())), storage_SRID))) c.almanacs = query.order_by(Almanac.modified.desc()).limit(200).all() response.content_type = 'application/vnd.google-earth.kml+xml kml' return render('/almanac/kml.mako')
def delete(self): result = {} for id in request.params.getall('id'): try: item = self.dbFacade().balances.changeDao.find_by_uid(id, options=[eagerload('balance'), eagerload('transfer')]) if item is None: result[id] = { "deleted" : False, "message": Messages.elementNotFound(key=id) } continue if not item.balance or not item.balance.can_modify_balance(h.authenticated_user().uid) \ or (item.transfer and not item.transfer.balance) \ or (item.transfer and not item.transfer.balance.can_modify_balance(h.authenticated_user().uid)): result[id] = { "deleted" : False, "message": Messages.permissionDenied() } continue if item.transfer: self.dbFacade().db.delete(item.transfer) self.dbFacade().db.delete(item) self.dbFacade().db.commit() result[id] = { "deleted" : True } except Exception, e: result[id] = { "deleted" : False, "message" : Messages.failedToDelete(exception=e), }
def deleted_histories( self, trans, **kwd ):
    """
    The number of histories that were deleted more than the specified number of days ago, but have not yet been purged.
    Also included is the number of datasets associated with the histories.
    """
    params = util.Params( kwd )
    message = ''
    # FIX: initialise so the return statement cannot raise NameError
    # when no day count was supplied.
    deleted_histories_days = ''
    if params.deleted_histories_days:
        deleted_histories_days = int( params.deleted_histories_days )
        cutoff_time = datetime.utcnow() - timedelta( days=deleted_histories_days )
        history_count = 0
        dataset_count = 0
        disk_space = 0
        histories = trans.sa_session.query( model.History ) \
                                    .filter( and_( model.History.table.c.deleted == true(),
                                                   model.History.table.c.purged == false(),
                                                   model.History.table.c.update_time < cutoff_time ) ) \
                                    .options( eagerload( 'datasets' ) )
        for history in histories:
            for hda in history.datasets:
                if not hda.dataset.purged:
                    dataset_count += 1
                    # Best-effort size accounting: a dataset with no
                    # recorded file size is simply skipped.
                    try:
                        disk_space += hda.dataset.file_size
                    except Exception:
                        pass
            history_count += 1
        message = "%d histories ( including a total of %d datasets ) were deleted more than %d days ago, but have not yet been purged, " \
                  "disk space: %s." % ( history_count, dataset_count, deleted_histories_days, nice_size( disk_space, True ) )
    else:
        message = "Enter the number of days."
    return str( deleted_histories_days ), message
def update(self):
    """Refresh the query: sittings whose status is in self.states,
    newest first, with the group eagerly loaded; then rebuild the data."""
    session = Session()
    status_filter = domain.GroupSitting.status.in_(self.states)
    self.query = (session.query(domain.GroupSitting)
                  .filter(status_filter)
                  .order_by(domain.GroupSitting.start_date.desc())
                  .options(eagerload("group")))
    self._setData()
def get_sittings(self, start, end):
    """Return the sittings with scheduled items for the given date range,
    each sitting's schedule sorted by its display order."""
    session = Session()
    date_and_group = sql.and_(
        domain.GroupSitting.start_date.between(start, end),
        domain.GroupSitting.group_id == self.context.group_id)
    query = (session.query(domain.GroupSitting)
             .filter(date_and_group)
             .order_by(domain.GroupSitting.start_date)
             .options(
                 #eagerload("sitting_type"),
                 eagerload("item_schedule"),
                 eagerload("item_schedule.item"),
                 eagerload("item_schedule.discussion")))
    items = query.all()
    # Minutes view shows the realised order; otherwise the planned one.
    order = "real_order" if self.display_minutes else "planned_order"
    sort_key = operator.attrgetter(order)
    for item in items:
        item.item_schedule.sort(key=sort_key)
    #item.sitting_type.sitting_type = item.sitting_type.sitting_type.capitalize()
    return items
def get_task_details(self, task_ids):
    """Fetch the tasks with the given ids (results eagerly loaded) and
    return them formatted via ``format_task_details``.

    :param task_ids: iterable of task ids to look up.
    """
    LOGGER.debug("get_task_details query start : " + to_str(datetime.utcnow()))
    # FIX: removed the dead `result = []` assignment that was
    # immediately overwritten in the original.
    task = DBSession.query(Task).filter(Task.task_id.in_(task_ids)).\
        options(eagerload("result")).all()
    LOGGER.debug("get_task_details query end : " + to_str(datetime.utcnow()))
    return self.format_task_details(task)
def _get_item_as_row(self, item_name): q = model.meta.Session.query(model.User) q = q.filter(model.User.user_name == unicode(item_name)) q = q.options(eagerload(model.User.memberships)) try: return q.one() except NoResultFound, e: log.exception(e) raise SourceError("No such user: %s" % item_name)
def actions(self):
    """List of action not in this component.

    Returns a JSON document mapping 'Resuls' to one entry per module of
    every active component.
    """
    s = model.Session()
    components = (s.query(model.Component)
                  .options(eagerload("modules"))
                  .filter_by(com_status='1')
                  .all())
    # Flatten (component, module) pairs into result dicts.
    results = [{'cname': comp.com_name, 'c_id': comp.id,
                'mname': m.mod_title, 'm_id': m.id}
               for comp in components
               for m in comp.modules]
    response.headers['content-type'] = 'text/plain'
    # NOTE: 'Resuls' is a typo, but it is part of the JSON wire format
    # consumed by clients, so it is kept for compatibility.
    return simplejson.dumps({'Resuls': results})
def get_items(self):
    """Load the scheduled items of public (non-agenda-private) sittings
    within [self.start_date, self.end_date] into self.itemschedules."""
    session = Session()
    public_states = get_states("groupsitting",
                               tagged=["public"],
                               not_tagged=["agendaprivate"],
                               conjunction="AND")
    where_clause = sql.and_(
        schema.group_sittings.c.status.in_(public_states),
        sql.between(schema.group_sittings.c.start_date,
                    self.start_date, self.end_date))
    query = (session.query(domain.ItemSchedule)
             .join(domain.GroupSitting)
             .filter(where_clause)
             .order_by(schema.group_sittings.c.start_date)
             .options(eagerload('sitting'),
                      eagerload('item'),
                      #eagerload('sitting.sitting_type'),
                      lazyload('item.owner')))
    self.itemschedules = query.all()
def __init__(self, context, request, view, manager):
    """Collect the sittings in which this member spoke, attaching to
    each its hansard URL and the member's own speeches."""
    super(MemberSpeechesViewlet, self).__init__(
        context, request, view, manager)
    user_id = self.context.user_id
    query = (Session().query(domain.GroupSitting)
             .join((domain.Hansard,
                    domain.Hansard.sitting_id == domain.GroupSitting.sitting_id))
             .join((domain.Speech,
                    domain.Speech.hansard_id == domain.Hansard.hansard_id))
             .filter(sql.and_(domain.Speech.person_id == user_id))
             .options(eagerload("group"),
                      eagerload("hansard"),
                      eagerload("hansard.speeches"))
             .order_by(domain.Speech.start_date.desc()))
    self.sittings = query.all()
    for sitting in self.sittings:
        sitting.href = "/business/sittings/obj-%s/hansard" % sitting.sitting_id
        # Keep only this member's own speeches for display.
        sitting.mp_speeches = [speech for speech in sitting.hansard.speeches
                               if speech.person_id == user_id]
def get_groups(user_id):
    """Return the principal names of all groups the given user is an
    active member of."""
    session = Session()
    query = session.query(domain.GroupMember).filter(
        rdb.and_(domain.GroupMember.user_id == user_id,
                 domain.GroupMember.active_p == True)
    ).options(eagerload("group"), lazyload("user"))
    return [member.group.principal_name for member in query]
def _subcontainer_id_map(self, id_list, serialization_params=None):
    """Return an id to model map of all subcontainer-type models in the id_list.

    :param id_list: ids of subcontainer models to fetch.
    :param serialization_params: optional params object whose ``keys``
        attribute may request the (potentially costly) job_state_summary
        join via the key 'job_state_summary'.
    :returns: dict mapping id -> model instance (empty dict when
        *id_list* is empty).
    """
    if not id_list:
        # FIX: the original returned a list here, an inconsistent type
        # for a function documented to return a map.
        return {}
    component_class = self.subcontainer_class
    query = (self._session().query(component_class)
             .filter(component_class.id.in_(id_list))
             .options(eagerload('collection'))
             .options(eagerload('tags'))
             .options(eagerload('annotations')))
    # Conditionally join the potentially costly job_state summary.
    # All the paranoia if-checking makes me wonder if serialization_params
    # should really be a property of the manager class instance.
    keys = serialization_params.keys if serialization_params else None
    if keys and 'job_state_summary' in keys:
        query = query.options(eagerload('job_state_summary'))
    return {row.id: row for row in query.all()}
def get_wiki(self, wiki=None, attrload=[], attrload_all=[]):
    """Get the Wiki instance identified by `wiki`, which can be `id` or
    `wikiurl` or a `Wiki` instance.  Return a list of Wiki instances or
    a single Wiki instance (or None when nothing matches).

    `attrload` / `attrload_all` list relation names to eager-load.  The
    shared-list defaults are safe because they are never mutated here.
    """
    msession = meta.Session()
    # FIX: the original tested `attrload == []` twice; the second test
    # was meant for `attrload_all`, so a Wiki instance passed together
    # with attrload_all was returned without any eager-loading.
    if isinstance(wiki, Wiki) and attrload == [] and attrload_all == []:
        return wiki
    # Compose query based on `wiki` type
    if isinstance(wiki, (int, long)):
        q = msession.query(Wiki).filter_by(id=wiki)
    elif isinstance(wiki, (str, unicode)):
        q = msession.query(Wiki).filter_by(wikiurl=wiki)
    elif isinstance(wiki, Wiki):
        q = msession.query(Wiki).filter_by(id=wiki.id)
    else:
        q = None
    # Compose eager-loading options (identity tests instead of ==/!= None)
    if q is not None:
        q = q.options(*[eagerload_all(e) for e in attrload_all])
        q = q.options(*[eagerload(e) for e in attrload])
        wiki = q.first()
    elif wiki is None:
        # No identifier at all: return every wiki.
        q = msession.query(Wiki)
        q = q.options(*[eagerload_all(e) for e in attrload_all])
        q = q.options(*[eagerload(e) for e in attrload])
        wiki = q.all()
    else:
        # Unsupported identifier type.
        wiki = None
    return wiki
def all(team_id=0):
    # show all matches
    # View function: `all` shadows the builtin, but the name is part of
    # the route/endpoint and must stay.
    team = None
    if team_id:
        if not g.user.is_on_team(team_id):
            # Viewer is not on this team: look it up for read-only
            # display; unknown ids bounce back to the full listing.
            team = Team.query.filter_by(id=team_id).first()
            if not team:
                return redirect(url_for('all'))
            page = {'top': 'team', 'sub': 'matches'}
        else:
            # NOTE(review): indexes g.user.teams by team_id — this
            # assumes `teams` is keyed by id; if it is a plain list the
            # index is positional and likely wrong.  TODO confirm.
            team = {'id': team_id, 'name': g.user.teams[team_id].name}
            page = {'top': 'my_teams', 'sub': 'matches'}
        matches=Match.query.\
            options(eagerload('competition')).\
            filter_by(team_id=team_id).\
            order_by(Match.date.asc()).\
            order_by(Match.id.asc()).\
            all()
    else:
        matches=Match.query.\
            options(eagerload('competition')).\
            order_by(Match.date.asc()).\
            order_by(Match.id.asc()).\
            all()
        page = {'top': 'matches', 'sub': 'all_matches'}
    # Split into upcoming vs previous relative to now (naive UTC).
    now = datetime.datetime.utcnow()
    upcoming = []
    previous = []
    for m in matches:
        if m.date > now:
            upcoming.append(m)
        else:
            previous.append(m)
    return rt('matches/table.html', page=page, team=team,
              upcoming=upcoming, previous=previous)
def package(pkg):
    """Render the detail page for package *pkg*: its porting status,
    dependencies (with an in-progress subset), dependents, and a
    dependency tree."""
    db = current_app.config['DB']()
    collections = list(queries.collections(db))
    query = db.query(tables.Package)
    # Eager/subquery-load everything the template touches to avoid N+1.
    query = query.options(eagerload('status_obj'))
    query = query.options(subqueryload('collection_packages'))
    query = query.options(subqueryload('collection_packages.links'))
    query = query.options(eagerload('collection_packages.status_obj'))
    query = query.options(subqueryload('collection_packages.rpms'))
    query = query.options(
        eagerload('collection_packages.rpms.py_dependencies'))
    package = query.get(pkg)
    if package is None:
        abort(404)
    query = queries.dependencies(db, package)
    query = query.options(eagerload('status_obj'))
    query = query.options(subqueryload('collection_packages'))
    query = query.options(subqueryload('collection_packages.links'))
    query = query.options(eagerload('collection_packages.status_obj'))
    dependencies = list(query)
    dependents = list(queries.dependents(db, package))
    in_progress_deps = [p for p in dependencies if p.status == 'in-progress']
    return render_template(
        'package.html',
        breadcrumbs=(
            (url_for('hello'), 'Python 3 Porting Database'),
            (url_for('package', pkg=pkg), pkg),
        ),
        collections=collections,
        pkg=package,
        dependencies=dependencies,
        dependents=dependents,
        deptree=[(package, gen_deptree(dependencies))],
        in_progress_deps=in_progress_deps,
        len_dependencies=len(dependencies),
        dependencies_status_counts=get_status_counts(dependencies),
    )
def get_log(session, dag_id, dag_run_id, task_id, task_try_number, full_content=False, token=None):
    """Get logs for a specific task instance.

    Returns JSON (with a continuation token) or a plain-text stream
    depending on the request's Accept header.  The signed *token*
    carries pagination metadata between calls.
    """
    key = current_app.config["SECRET_KEY"]
    if not token:
        metadata = {}
    else:
        try:
            metadata = URLSafeSerializer(key).loads(token)
        except BadSignature:
            raise BadRequest("Bad Signature. Please use only the tokens provided by the API.")
    # FIX: the original did `metadata.get('download_logs') and
    # metadata['download_logs']`, a redundant double lookup; .get()
    # alone is equivalent.
    if metadata.get('download_logs'):
        full_content = True
    metadata['download_logs'] = bool(full_content)
    task_log_reader = TaskLogReader()
    if not task_log_reader.supports_read:
        raise BadRequest("Task log handler does not support read logs.")
    ti = (
        session.query(TaskInstance)
        .filter(TaskInstance.task_id == task_id, TaskInstance.run_id == dag_run_id)
        .join(TaskInstance.dag_run)
        .options(eagerload(TaskInstance.dag_run))
        .one_or_none()
    )
    if ti is None:
        metadata['end_of_log'] = True
        raise NotFound(title="TaskInstance not found")
    dag = current_app.dag_bag.get_dag(dag_id)
    if dag:
        try:
            ti.task = dag.get_task(ti.task_id)
        except TaskNotFound:
            # Serve logs even when the task no longer exists in the DAG.
            pass
    return_type = request.accept_mimetypes.best_match(['text/plain', 'application/json'])
    # return_type would be either the above two or None
    if return_type == 'application/json' or return_type is None:  # default
        logs, metadata = task_log_reader.read_log_chunks(ti, task_try_number, metadata)
        logs = logs[0] if task_try_number is not None else logs
        token = URLSafeSerializer(key).dumps(metadata)
        return logs_schema.dump(LogResponseObject(continuation_token=token, content=logs))
    # text/plain. Stream
    logs = task_log_reader.read_log_stream(ti, task_try_number, metadata)
    return Response(logs, headers={"Content-Type": return_type})
def getBom(self, itemId):
    """Return the bill of materials for *itemId* as a list of dicts,
    one per component, with the component's descriptive fields and the
    required quantity.
    """
    item = self.sess.query(Item).options(eagerload('boms')).get(itemId)
    rows = []
    for bom in item.boms:
        component = bom.item
        rows.append({
            # NOTE(review): '%d%d' has no separator, so e.g. (1, 23) and
            # (12, 3) both yield '123' — confirm callers treat pkey as an
            # opaque, possibly non-unique identifier.
            'pkey': '%d%d' % (bom.itemId, bom.componentItemId),
            'itemId': bom.componentItemId,
            'itemName': component.itemName,
            'specification': component.specification,
            'model': component.model,
            'weight': component.weight,
            'quantity': bom.quantity,
        })
    return rows
def find(cls, id, instance_filter=True, include_deleted=False, full=False): try: q = meta.Session.query(Proposal) id = int(unicode(id).split('-', 1)[0]) q = q.filter(Proposal.id == id) if full: q = q.options(eagerload(Proposal.comments)) q = q.options(eagerload(Proposal.adopt_poll)) q = q.options(eagerload(Proposal.rate_poll)) q = q.options(eagerload(Proposal.taggings)) q = q.options(eagerload(Proposal.parents)) if ifilter.has_instance() and instance_filter: q = q.filter(Proposal.instance_id == ifilter.get_instance().id) if not include_deleted: q = q.filter(or_(Proposal.delete_time == None, # noqa Proposal.delete_time > datetime.utcnow())) return q.limit(1).first() except Exception, e: log.warn("find(%s): %s" % (id, e)) return None
def find_methods(self, search_string, stage=None, limit=100):
    """Find all methods that match the search string.

    Spaces in *search_string* act as wildcards, so multi-word searches
    match loosely.  Optionally filters by *stage* and caps the result
    count at *limit*.
    """
    with closing(self.database.session()) as session:
        pattern = '%{}%'.format(search_string.replace(' ', '%'))
        query = session.query(models.Method)
        query = query.filter(models.Method.name.ilike(pattern))
        if stage:
            query = query.filter(models.Method.stage == stage)
        if limit:
            query = query.limit(limit)
        return query.options(eagerload('*')).all()
def index(self, page=1, search=None, media_filter=None, **kwargs):
    """List comments with pagination and filtering.

    :param page: Page number, defaults to 1.
    :type page: int
    :param search: Optional search term to filter by
    :type search: unicode or None
    :param media_filter: Optional media ID to filter by
    :type media_filter: int or None
    :rtype: dict
    :returns:
        comments
            The list of :class:`~mediadrop.model.comments.Comment`
            instances for this page.
        edit_form
            The :class:`mediadrop.forms.admin.comments.EditCommentForm`
            instance, to be rendered for each instance in ``comments``.
        search
            The given search term, if any
        search_form
            The :class:`~mediadrop.forms.admin.SearchForm` instance
        media_filter
            The given podcast ID to filter by, if any
        media_filter_title
            The media title for rendering if a ``media_filter`` was
            specified.
    """
    comments = Comment.query.trash(False)
    comments = comments.order_by(Comment.reviewed.asc(),
                                 Comment.created_on.desc())
    # This only works since we only have comments on one type of content.
    # It will need re-evaluation if we ever add others.
    comments = comments.options(orm.eagerload('media'))

    if search is not None:
        comments = comments.search(search)

    media_filter_title = media_filter
    if media_filter is not None:
        comments = comments.filter(
            Comment.media.has(Media.id == media_filter))
        media_filter_title = DBSession.query(Media.title).get(media_filter)
        media_filter = int(media_filter)

    return dict(
        comments=comments,
        edit_form=edit_form,
        media_filter=media_filter,
        media_filter_title=media_filter_title,
        search=search,
        search_form=search_form,
    )
def my_previous_matches():
    """List the signed-in user's matches whose date has already passed.

    Guests are redirected to the full match listing.
    """
    if g.user.is_guest:
        return redirect(url_for('all'))
    now = datetime.datetime.utcnow()
    matches = (Match.query
               .filter(Match.team_id.in_(g.user.teams.keys()))
               .filter(Match.date <= now)
               .options(eagerload('competition'), eagerload('server'))
               .order_by(Match.date.asc())
               .order_by(Match.id.asc())
               .all())
    return rt('matches/table.html',
              page={'top': 'my_matches', 'sub': 'previous'},
              previous_only=True,
              previous=matches)
def by_subjects(cls, subjects, include_deleted=True): try: q = meta.Session.query(Poll) q = q.filter(Poll.subject.in_(subjects)) q = q.options(eagerload(Poll.tallies)) if not include_deleted: q = q.filter(or_(Poll.end_time == None, # noqa Poll.end_time > datetime.utcnow())) return q.all() except Exception, e: log.exception("by_subjects(%s): %s" % (subjects, e), e) return []
def processEager(eager):
    """Normalize an eager-load specification into eagerload() options.

    :param eager: ``None``, a single relation-path string, or an iterable
        of relation-path strings.
    :returns: an empty tuple for ``None``; otherwise a list with one
        ``eagerload`` option per relation (paths rewritten via
        ``_replacements``).
    """
    # FIX: compare to None by identity, not equality.
    if eager is None:
        return tuple()
    # A bare string means a single relation path.
    if isinstance(eager, basestring):
        eager = (eager,)
    return [eagerload(_replacements(e)) for e in eager]
def get_bills(self, **kwargs): """ Returns a list of all bills filtered by values. """ records = [] try: session = self.dal.Session() records = session.query(Bill).options( eagerload('category')).filter_by(**kwargs).all() except Exception, e: print str(e)
def handleSchedule(object, event):
    """Keep scheduled items' workflow states in sync with the sitting's
    agenda status: move items back to a "to be scheduled" state when the
    agenda reverts to draft, and into a "scheduled" state when the agenda
    is published.
    """
    session = Session()
    # Strip the security proxy so attributes can be read directly.
    s = removeSecurityProxy(object)
    # Re-fetch the sitting with its type and schedule eagerly loaded.
    sitting = session.query(domain.GroupSitting).options(
        eagerload('sitting_type'),
        eagerload('item_schedule')).get(s.sitting_id)
    schedulings = map(removeSecurityProxy, sitting.item_schedule)
    if sitting.status == "draft_agenda":
        # Agenda reverted to draft: push each scheduled item back into a
        # workflow state tagged "tobescheduled".
        for sch in schedulings:
            if sch.item.type != "heading":  # headings carry no workflow
                wf_info = IWorkflowInfo(sch.item)
                transitions = wf_info.getSystemTransitionIds()
                state = wf_info.state()
                wf = wf_info.workflow()
                next_state = get_states(sch.item.type,
                                        tagged=["tobescheduled"])
                # Fire the first system transition whose destination is a
                # "tobescheduled" state.
                for transition_id in transitions:
                    t = wf.getTransition(state.getState(), transition_id)
                    if t.destination in next_state:
                        #TODO find out why firetransition fails for reschedule even
                        #when the user has requisite permissions
                        wf_info.fireTransition(transition_id,
                                               check_security=False)
                        break
    elif sitting.status == "published_agenda":
        # Agenda finalised: advance each item into a workflow state tagged
        # "scheduled".
        for sch in schedulings:
            if sch.item.type != "heading":
                wf_info = IWorkflowInfo(sch.item)
                transitions = wf_info.getSystemTransitionIds()
                state = wf_info.state()
                wf = wf_info.workflow()
                next_state = get_states(sch.item.type, tagged=["scheduled"])
                for transition_id in transitions:
                    t = wf.getTransition(state.getState(), transition_id)
                    if t.destination in next_state:
                        wf_info.fireTransition(transition_id,
                                               check_security=False)
                        break
def purge_histories( app, cutoff_time, remove_from_disk, info_only=False, force_retry=False ):
    """Purge deleted histories whose update_time is older than cutoff_time.

    The dataset associations of each history are also marked as deleted;
    _purge_dataset_instance() purges each underlying Dataset as needed.
    ``history.purged == True`` simply means the history can no longer be
    undeleted, i.e. all associated datasets are marked as deleted.

    With ``info_only`` nothing is modified — candidates are only reported.
    With ``force_retry`` already-purged histories are reconsidered.
    """
    history_count = 0
    start = time.time()
    if force_retry:
        # Retry mode: ignore the purged flag so previously-failed purges run again.
        histories = app.sa_session.query( app.model.History ) \
                                  .filter( and_( app.model.History.table.c.deleted == true(),
                                                 app.model.History.table.c.update_time < cutoff_time ) ) \
                                  .options( eagerload( 'datasets' ) )
    else:
        histories = app.sa_session.query( app.model.History ) \
                                  .filter( and_( app.model.History.table.c.deleted == true(),
                                                 app.model.History.table.c.purged == false(),
                                                 app.model.History.table.c.update_time < cutoff_time ) ) \
                                  .options( eagerload( 'datasets' ) )
    for history in histories:
        print ("### Processing history id %d (%s)" % (history.id, history.name)).encode('utf-8')
        for dataset_assoc in history.datasets:
            # Mark the DatasetInstance as deleted, clear associated files, and
            # mark the Dataset as deleted if it is deletable.
            _purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only=info_only )
        if not info_only:
            # TODO: should the Delete DefaultHistoryPermissions be deleted here?  This was incorrectly
            # done in the _list_delete() method of the history controller, so copied it here.  Not sure
            # if we should ever delete info like this from the db though, so commented out for now...
            # for dhp in history.default_permissions:
            #     dhp.delete()
            print "Purging history id ", history.id
            history.purged = True
            app.sa_session.add( history )
            app.sa_session.flush()
        else:
            print "History id %d will be purged (without 'info_only' mode)" % history.id
        history_count += 1
    stop = time.time()
    print 'Purged %d histories.' % history_count
    print "Elapsed time: ", stop - start
    print "##########################################"
def get_wikicomment(self, wikicomment=None, attrload=[], attrload_all=[]):
    """Get the wiki comment identified by `wikicomment` which can be,
        `id` or `WikiComment` instance. if wikicomment==None, return all
        the comments for `wiki`.

    :param wikicomment: comment id (int/long), a WikiComment instance, or
        None to fetch all comments.
    :param attrload: relation names to eager-load.
    :param attrload_all: relation paths to eager-load transitively.
        (Both defaults are read-only, so the mutable defaults are safe.)
    Return, List of WikiComment instances or WikiComment instance.
    """
    # Fast path: an already-loaded instance with no extra load options.
    # BUG FIX: this branch previously assigned the shortcut result and
    # then fell through, re-querying the database and discarding it.
    if isinstance(wikicomment, WikiComment) and attrload == [] and \
       attrload_all == []:
        return wikicomment

    msession = meta.Session()
    # Compose query based on the type of `wikicomment`.
    if isinstance(wikicomment, (int, long)):
        q = msession.query(WikiComment).filter_by(id=wikicomment)
    elif isinstance(wikicomment, WikiComment):
        q = msession.query(WikiComment).filter_by(id=wikicomment.id)
    else:
        q = None

    # Apply eager-loading options and execute.
    if q is not None:
        q = q.options(*[eagerload_all(e) for e in attrload_all])
        q = q.options(*[eagerload(e) for e in attrload])
        return q.first()
    elif wikicomment is None:
        q = msession.query(WikiComment)
        q = q.options(*[eagerload_all(e) for e in attrload_all])
        q = q.options(*[eagerload(e) for e in attrload])
        return q.all()
    return None
def index(self, page=1, search=None, type=None, **kwargs):
    """List comments with pagination and filtering.

    :param page: Page number, defaults to 1.
    :type page: int
    :param search: Optional search term to filter by
    :type search: unicode or None
    :rtype: dict
    :returns:
        comments
            The list of :class:`~mediadrop.model.comments.Comment`
            instances for this page.
        edit_form
            The :class:`mediadrop.forms.admin.comments.EditCommentForm`
            instance, to be rendered for each instance in ``comments``.
        search
            The given search term, if any
        search_form
            The :class:`~mediadrop.forms.admin.SearchForm` instance
        media_filter
            Always None (search by podcast is not implemented here).
        media_filter_title
            Always None (see ``media_filter``).
    """
    comments = Comment.query.trash(False)\
        .order_by(Comment.reviewed.asc(), Comment.created_on.desc())
    # FIX: dropped the unused `user`/`group` locals; besides being dead
    # code, evaluating request.perm.user.groups[0] could raise IndexError
    # for a user with no groups.

    # This only works since we only have comments on one type of content.
    # It will need re-evaluation if we ever add others.
    comments = comments.options(orm.eagerload('media'))

    if search is not None:
        comments = comments.search(search)

    # If we do implement a search by podcast on the comments do it using
    # these variables.
    media_filter_title = None
    media_filter = None

    return dict(
        comments=comments,
        edit_form=edit_form,
        media_filter=media_filter,
        media_filter_title=media_filter_title,
        search=search,
        search_form=search_form,
    )
def get(self, teamId, projectId, todolistId, todoitemId):
    """Render the operation history page for a single todo item."""
    project = Project.query.filter_by(id=projectId).first()
    todolist = TodoList.query.filter_by(id=todolistId).first()
    todo_item = TodoItem.query.filter_by(id=todoitemId).first()
    # target_type=4 selects operations recorded against todo items;
    # 'own' (the acting user) is eager-loaded for the template.
    history = (Operation.query
               .options(eagerload('own'))
               .filter_by(team_id=teamId, target_type=4,
                          target_id=todoitemId)
               .order_by(Operation.createTime)
               .all())
    self.render("operation/todoitem.html",
                teamId=teamId,
                todolist=todolist,
                project=project,
                todoItem=todo_item,
                operations=history)
def lesson(self, day, order, schedule_id=None, eager=True):
    """Get scheduled lessons for the specified day and order.

    :param schedule_id: Optional schedule's id to work on.
    :type schedule_id: :class:`int`
    :param eager: Whether or not to load eagerly lesson's group and
        group's year.
    """
    query = Lesson.query_current(schedule_id)
    query = query.join((Group, Lesson.group_id == Group.id))
    query = query.join(
        (GroupMembership, Group.id == GroupMembership.group_id))
    # Only lessons for groups this student actively belongs to.
    query = query.filter(GroupMembership.student_id == self.id)
    query = query.filter(GroupMembership.active == True)  # noqa: E712
    query = query.filter(Lesson.day == day)
    query = query.filter(Lesson.order == order)
    if eager:
        query = query.options(eagerload('group'), eagerload('group.year'))
    return query.all()
def get_sittings(self):
    """Return draft-minutes sittings assigned to the logged-in user,
    annotated with the user's takes (with offsets relative to the
    sitting start), the playable media file, and a display name.
    """
    session = Session()
    logged_in_user = get_db_user()
    title = get_title_of_user(logged_in_user.user_id)
    sittings = session.query(domain.GroupSitting).join((domain.Assignment,
        domain.GroupSitting.sitting_id == domain.Assignment.sitting_id)) \
        .filter(sql.and_(domain.GroupSitting.status == 'draft_minutes',
            domain.Assignment.staff_id == logged_in_user.user_id)) \
        .options(
            eagerload("hansard"),
            eagerload("hansard.media_paths")).all()
    # Map the user's role to the Take column that assigns takes to them.
    role_columns = {
        "Editor": domain.Take.editor_id,
        "Reader": domain.Take.reader_id,
        "Reporter": domain.Take.reporter_id,
    }
    for sitting in sittings:
        # BUG FIX: `takes` was unbound (NameError) — or stale from the
        # previous sitting — when the title was not one of the three
        # known roles.
        takes = []
        role_column = role_columns.get(title)
        if role_column is not None:
            takes = session.query(domain.Take) \
                .filter(sql.and_(role_column == logged_in_user.user_id,
                    domain.Take.sitting_id == sitting.sitting_id)) \
                .order_by(domain.Take.start_date).all()
        for take in takes:
            # Offsets relative to the sitting start, for the player UI.
            take.start_time = take.start_date - sitting.start_date
            take.end_time = take.end_date - sitting.start_date
        sitting.takes = takes
        if sitting.hansard.media_paths:
            sitting.file = sitting.hansard.media_paths.web_optimised_video_path
        else:
            sitting.file = ""
        sitting.name = sitting.group.short_name + str(
            sitting.start_date.strftime('%d-%B-%Y %H:%M'))
    return sittings
def reload(self):
    """Re-query and cache all of the user's votes on the poll.

    Votes are ordered newest-first; when ``at_time`` is set, only votes
    cast at or before that moment are included.  Returns self.
    """
    query = model.meta.Session.query(Vote)
    query = query.filter(Vote.user_id == self.user.id)
    query = query.filter(Vote.poll_id == self.poll.id)
    query = query.options(eagerload(Vote.delegation))
    if self.at_time:
        query = query.filter(Vote.create_time <= self.at_time)
    query = query.order_by(Vote.id.desc())
    self.votes = query.all()
    return self
def package(pkg):
    """Render the package detail page, including both run-time and
    build-time dependency information.

    Responds 404 when the package does not exist.
    """
    db = current_app.config['DB']()
    collections = list(queries.collections(db))

    def with_detail_options(query):
        # Eager/subquery-load the relations the template iterates over.
        query = query.options(eagerload('status_obj'))
        query = query.options(subqueryload('collection_packages'))
        query = query.options(subqueryload('collection_packages.links'))
        query = query.options(eagerload('collection_packages.status_obj'))
        return query

    pkg_query = with_detail_options(db.query(tables.Package))
    pkg_query = pkg_query.options(subqueryload('collection_packages.rpms'))
    pkg_query = pkg_query.options(
        eagerload('collection_packages.rpms.py_dependencies'))
    package = pkg_query.get(pkg)
    if package is None:
        abort(404)

    dependencies = list(
        with_detail_options(queries.dependencies(db, package)))
    dependents = list(queries.dependents(db, package))

    build_dependencies = list(
        with_detail_options(queries.build_dependencies(db, package)))
    build_dependents = list(queries.build_dependents(db, package))

    return render_template(
        'package.html',
        breadcrumbs=(
            (url_for('hello'), 'Python 3 Porting Database'),
            (url_for('package', pkg=pkg), pkg),
        ),
        collections=collections,
        pkg=package,
        dependencies=dependencies,
        dependents=dependents,
        deptree=[(package, gen_deptree(dependencies))],
        dependencies_status_counts=get_status_counts(dependencies),
        build_dependencies=build_dependencies,
        build_dependents=build_dependents,
        build_deptree=[(package, gen_deptree(build_dependencies,
                                             run_time=False,
                                             build_time=True))],
        build_dependencies_status_counts=get_status_counts(
            build_dependencies),
    )