def get(self, job_id):
    """Return serialized phases (with steps and log sources) for a job."""
    job = Job.query.options(
        subqueryload_all(Job.phases),
        joinedload('project', innerjoin=True),
    ).get(job_id)
    if job is None:
        return '', 404

    # Eager-load step nodes and log sources up front to avoid N+1 queries
    # during serialization below.
    phases = list(
        JobPhase.query.options(
            subqueryload_all(JobPhase.steps, JobStep.node),
            subqueryload_all(JobPhase.steps, JobStep.logsources),
        ).filter(
            JobPhase.job_id == job.id,
        ).order_by(
            JobPhase.date_started.asc(),
            JobPhase.date_created.asc(),
        )
    )

    result = []
    for phase, phase_payload in zip(phases, self.serialize(phases)):
        steps = list(phase.steps)
        step_payloads = []
        for step, step_payload in zip(steps, self.serialize(steps)):
            step_payload['logSources'] = self.serialize(list(step.logsources))
            step_payloads.append(step_payload)
        phase_payload['steps'] = step_payloads
        result.append(phase_payload)

    return self.respond(result, serialize=False)
def try_get_projects_and_repository(args):
    """Given a set of HTTP POST arguments, try and find the appropriate
    projects and repository.

    Possible inputs:
        project
            Returns: (A list containing only this project) * its repository

        repository
            Returns: All active projects for this repo * repo

        repository living at key 'repository[phabricator.callsign]'
            Returns: All active projects for this repo * repo
    """
    if args.project:
        # A single explicit project: pair it with its own repository.
        repository = Repository.query.get(args.project.repository_id)
        return [args.project], repository
    # The two remaining inputs both resolve to a repository and then share
    # the same "all active projects for this repo" lookup; the original
    # duplicated that query body verbatim in both branches.
    repository = args.repository or args['repository[phabricator.callsign]']
    if repository:
        return _active_projects_for_repository(repository), repository
    return None, None


def _active_projects_for_repository(repository):
    """Return all active projects attached to *repository*, with their
    plans eagerly loaded."""
    return list(
        Project.query.options(subqueryload_all('plans')).filter(
            Project.status == ProjectStatus.active,
            Project.repository_id == repository.id,
        ))
def try_get_projects_and_repository(args):
    """Given a set of HTTP POST arguments, try and find the appropriate
    projects and repository.

    Possible inputs:
        project
            Returns: (A list containing only this project) * its repository

        repository
            Returns: All active projects for this repo * repo

        repository living at key 'repository[phabricator.callsign]'
            Returns: All active projects for this repo * repo
    """
    if args.project:
        repo = Repository.query.get(args.project.repository_id)
        return [args.project], repo

    if args.repository:
        repo = args.repository
    elif args["repository[phabricator.callsign]"]:
        repo = args["repository[phabricator.callsign]"]
    else:
        return None, None

    # Either repository-style input resolves the same way: every active
    # project attached to the repo, with plans eagerly loaded.
    active_projects = list(
        Project.query.options(subqueryload_all("plans")).filter(
            Project.status == ProjectStatus.active,
            Project.repository_id == repo.id,
        )
    )
    return active_projects, repo
def get_all_syntheses(self):
    """Return every Synthesis in this discussion, with its idea and
    idea-link associations plus the publishing post eagerly loaded."""
    from .idea_graph_view import Synthesis
    query = self.db.query(Synthesis).options(
        subqueryload_all('idea_assocs.idea'),
        subqueryload_all('idealink_assocs.idea_link'),
        subqueryload_all(Synthesis.published_in_post),
    )
    return query.filter(Synthesis.discussion_id == self.id).all()
def add_addresses(session):
    """ Add an AddressAssignment record for every PrimaryNameAssociation """
    query = session.query(PrimaryNameAssociation)
    query = query.join(System, DnsDomain)
    query = query.filter(System.ip != None)
    # Skip systems whose IP already carries an AddressAssignment.
    query = query.filter(~exists().where(AddressAssignment.ip == System.ip))
    query = query.options(contains_eager('dns_record'))
    query = query.options(contains_eager('dns_record.dns_domain'))
    query = query.options(
        subqueryload_all('hardware_entity.interfaces.vlans.assignments'))
    query = query.options(
        subqueryload_all('hardware_entity.interfaces._vlan_ids'))

    added = 0
    for pna in query.all():
        hw = pna.hardware_entity
        # Only handle the simple single-interface case.
        if len(hw.interfaces) != 1:
            print("{0} has an unexpected number of interfaces, skipping: "
                  "{1}".format(hw, len(hw.interfaces)))
            continue
        iface = hw.interfaces[0]
        if len(iface.vlans[0].addresses):
            print("{0} already has addresses, skipping".format(iface))
            continue
        iface.vlans[0].addresses.append(pna.dns_record.ip)
        added += 1
    session.flush()
    print("Added %d AddressAssignment records" % added)
def get(self, repo: Repository):
    """
    Return a list of builds for the given repository.
    """
    query = (
        Build.query.options(
            joinedload("revision"),
            subqueryload_all("authors"),
            subqueryload_all("revision.authors"),
            subqueryload_all("stats"),
        )
        .filter(Build.repository_id == repo.id)
        .order_by(Build.number.desc())
    )

    requested_user = request.args.get("user")
    if requested_user:
        # "me" resolves to the authenticated user; anything else is a user id.
        if requested_user == "me":
            account = auth.get_current_user()
        else:
            account = User.query.get(requested_user)
        if not account:
            return self.respond([])
        verified_emails = db.session.query(Email.email).filter(
            Email.user_id == account.id, Email.verified == True  # NOQA
        )
        query = query.filter(
            Build.authors.any(Author.email.in_(verified_emails))
        )

    return self.paginate_with_schema(builds_schema, query)
def add_addresses(session):
    """ Add an AddressAssignment record for every PrimaryNameAssociation """
    q = session.query(PrimaryNameAssociation)
    q = q.join(System, DnsDomain)
    q = q.filter(System.ip != None)
    q = q.filter(~exists().where(AddressAssignment.ip == System.ip))
    q = q.options(contains_eager('dns_record'))
    q = q.options(contains_eager('dns_record.dns_domain'))
    q = q.options(
        subqueryload_all('hardware_entity.interfaces.vlans.assignments'))
    q = q.options(subqueryload_all('hardware_entity.interfaces._vlan_ids'))

    count = 0
    for assoc in q.all():
        entity = assoc.hardware_entity
        if len(entity.interfaces) != 1:
            # Anything but exactly one interface is ambiguous; skip it.
            print("{0} has an unexpected number of interfaces, skipping: "
                  "{1}".format(entity, len(entity.interfaces)))
            continue
        nic = entity.interfaces[0]
        if len(nic.vlans[0].addresses):
            print("{0} already has addresses, skipping".format(nic))
            continue
        nic.vlans[0].addresses.append(assoc.dns_record.ip)
        count += 1
    session.flush()
    print("Added %d AddressAssignment records" % count)
def eager_query(cls):
    """Extend the base Objective query with eager-loaded associations."""
    from sqlalchemy import orm
    base = super(Objective, cls).eager_query()
    eager_opts = (
        orm.subqueryload_all('section_objectives.section'),
        orm.subqueryload_all('objective_controls.control'),
        orm.subqueryload_all('objective_objects'),
    )
    return base.options(*eager_opts)
def eager_query(cls):
    """Extend the base Program query with eager-loaded associations."""
    from sqlalchemy import orm
    base = super(Program, cls).eager_query()
    eager_opts = (
        orm.subqueryload_all('program_directives.directive'),
        orm.subqueryload('cycles'),
        orm.subqueryload_all('program_controls.control'),
    )
    return base.options(*eager_opts)
def eager_query(cls):
    """Extend the base Risk query with eager-loaded associations."""
    from sqlalchemy import orm
    base = super(Risk, cls).eager_query()
    eager_opts = (
        orm.subqueryload_all('control_risks.control'),
        # FIXME: make eager-loading work for categorizations
        # orm.subqueryload_all('categorizations.categories'),
        orm.subqueryload_all('risk_risky_attributes.risky_attribute'),
    )
    return base.options(*eager_opts)
def query(self, req):
    """Build the language listing query, caching all domain elements
    on the instance first."""
    self._domainelements = DBSession.query(DomainElement).all()
    base = DBSession.query(Language).order_by(Language.id)
    return base.options(
        subqueryload_all('languageidentifier', 'identifier'),
        subqueryload_all('countries'),
        joinedload_all(Language.valuesets, ValueSet.values),
        joinedload_all(WalsLanguage.genus, Genus.family),
    )
def get(self, build: Build):
    """
    Return a list of jobs for a given build.
    """
    jobs = (
        Job.query.options(
            subqueryload_all('stats'),
            subqueryload_all('failures'),
        )
        .filter(Job.build_id == build.id)
        .order_by(Job.number.asc())
    )
    return self.respond_with_schema(jobs_schema, jobs)
def eager_nodes_handlers(cls, iterable):
    """Eager load objects instances that is used in nodes handler.

    :param iterable: iterable (SQLAlchemy query)
    :returns: iterable (SQLAlchemy query)
    """
    load_options = (
        joinedload('cluster'),
        subqueryload_all('nic_interfaces.assigned_networks_list'),
        subqueryload_all('bond_interfaces.assigned_networks_list'),
        subqueryload_all('ip_addrs.network_data'),
    )
    return cls.eager_base(iterable, load_options)
def eager_query(cls):
    """Extend the base Directive query with eager-loaded associations."""
    from sqlalchemy import orm
    base = super(Directive, cls).eager_query()
    eager_opts = (
        orm.joinedload('audit_frequency'),
        orm.joinedload('audit_duration'),
        orm.subqueryload('controls'),
        orm.subqueryload_all('program_directives.program'),
        orm.subqueryload_all('directive_controls'),
        orm.subqueryload('sections'),
    )
    return base.options(*eager_opts)
def get_all_syntheses(self):
    """Fetch all syntheses of this discussion, eagerly loading their
    idea/idea-link associations and publishing post."""
    from .idea_graph_view import Synthesis
    return (
        self.db.query(Synthesis)
        .options(
            subqueryload_all('idea_assocs.idea'),
            subqueryload_all('idealink_assocs.idea_link'),
            subqueryload_all(Synthesis.published_in_post),
        )
        .filter(Synthesis.discussion_id == self.id)
        .all()
    )
def eager_query(cls):
    """Extend the base System query with eager-loaded associations."""
    from sqlalchemy import orm
    base = super(System, cls).eager_query()
    eager_opts = (
        orm.joinedload('type'),
        orm.joinedload('network_zone'),
        orm.subqueryload('responses'),
        orm.subqueryload_all('system_controls.control'),
        orm.subqueryload_all('sub_system_systems.child'),
        orm.subqueryload_all('super_system_systems.parent'),
        orm.subqueryload('transactions'),
    )
    return base.options(*eager_opts)
def eager_nodes_handlers(cls, iterable):
    """Eager load objects instances that is used in nodes handler.

    :param iterable: iterable (SQLAlchemy query)
    :returns: iterable (SQLAlchemy query)
    """
    return cls.eager_base(
        iterable,
        (
            joinedload('cluster'),
            subqueryload_all('nic_interfaces.assigned_networks_list'),
            subqueryload_all('bond_interfaces.assigned_networks_list'),
            subqueryload_all('ip_addrs.network_data'),
        ),
    )
def get(self, revision: Revision):
    """
    Return a list of jobs for a given revision.
    """
    build = fetch_build_for_revision(revision.repository, revision)
    if not build:
        return self.respond(status=404)

    # A meta-build may aggregate several original builds; collect them all.
    original_ids = [original.id for original in build.original]
    jobs = (
        Job.query.options(
            subqueryload_all('stats'),
            subqueryload_all('failures'),
        )
        .filter(Job.build_id.in_(original_ids))
        .order_by(Job.number.asc())
    )
    return self.respond_with_schema(jobs_schema, jobs)
def get_all_syntheses_query(self, include_unpublished=True, include_tombstones=False):
    """Build a query over this discussion's syntheses.

    By default only published syntheses pass; *include_tombstones* relaxes
    the tombstone restriction and *include_unpublished* also admits
    syntheses without any published post.
    """
    from .idea_graph_view import Synthesis
    from .post import SynthesisPost, PublicationStates

    condition = SynthesisPost.publication_state == PublicationStates.PUBLISHED
    if not include_tombstones:
        condition = condition & SynthesisPost.tombstone_condition()
    if include_unpublished:
        # Outer-join misses (no post at all) also qualify as unpublished.
        condition = condition | (SynthesisPost.id == None)

    return (
        self.db.query(Synthesis)
        .outerjoin(SynthesisPost)
        .options(
            subqueryload_all('idea_assocs.idea'),
            subqueryload_all('idealink_assocs.idea_link'),
            subqueryload_all(Synthesis.published_in_post),
        )
        .filter(Synthesis.discussion_id == self.id, condition)
    )
def get_latest_builds(repo_list: List[Repository], result: Result):
    """Map each repository in *repo_list* to its most recent finished,
    unpatched build with the given *result*.

    Returns {repository_id: Build}; repositories with no matching build
    are absent from the mapping.
    """
    # TODO(dcramer): this should find the 'last build in [default branch]'
    if not repo_list:
        return {}

    newest_matching = db.session.query(Build.id).join(
        Source, Build.source_id == Source.id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Build.status == Status.finished,
        Build.result == result,
    ).order_by(Build.date_created.desc())

    # repo id -> newest matching build id, via a correlated scalar subquery
    build_map = dict(
        db.session.query(
            Repository.id,
            newest_matching.filter(
                Build.repository_id == Repository.id,
            ).limit(1).as_scalar(),
        ).filter(Repository.id.in_(r.id for r in repo_list))
    )
    if not build_map:
        return {}

    builds = Build.query.unrestricted_unsafe().filter(
        Build.id.in_(build_map.values()),
    ).options(
        joinedload('source'),
        joinedload('source').joinedload('author'),
        joinedload('source').joinedload('revision'),
        joinedload('source').joinedload('patch'),
        subqueryload_all('stats'),
    )
    return {b.repository_id: b for b in builds}
def eager_query(cls):
    """Extend the base Section query with eager-loaded associations."""
    from sqlalchemy import orm
    base = super(Section, cls).eager_query()
    return base.options(
        orm.joinedload('directive'),
        orm.subqueryload_all('control_sections.control'),
    )
def get(cls, id):
    """Get the CirculationObject associated with the given id."""
    obj = cls.query.options(subqueryload_all('*')).get(id)
    if obj is None:
        msg = "A {0} object with id {1} doesn't exist"
        raise Exception(msg.format(cls.__name__, id))

    data = jsonpickle.decode(obj._data)

    # Rebuild derived attributes from the pickled payload; skip any entry
    # whose builder cannot be satisfied from the stored data.
    if hasattr(cls, '_construction_schema'):
        for key, builder in cls._construction_schema.items():
            try:
                setattr(obj, key, builder(data))
            except AttributeError:
                pass

    # Getting data for other modules
    from invenio_circulation.views.utils import (
            send_signal, flatten)
    from invenio_circulation.signals import get_entity
    construction_data = flatten(send_signal(get_entity, cls.__name__, None))
    if construction_data:
        for key in construction_data:
            try:
                setattr(obj, key, data[key])
            except KeyError:
                pass

    return obj
def get(self, repo: Repository):
    """
    Return a list of builds for the given repository.
    """
    user = auth.get_current_user()

    query = (
        Build.query.options(
            joinedload("source", innerjoin=True),
            joinedload("source", innerjoin=True).joinedload("author"),
            joinedload("source", innerjoin=True).joinedload(
                "revision", innerjoin=True),
            subqueryload_all("stats"),
        )
        .filter(Build.repository_id == repo.id)
        .order_by(Build.number.desc())
    )

    if request.args.get("show") == "mine":
        # Restrict to builds authored under one of the user's verified emails.
        verified_emails = db.session.query(Email.email).filter(
            Email.user_id == user.id, Email.verified == True  # NOQA
        )
        author_ids = db.session.query(Author.id).filter(
            Author.email.in_(verified_emails)
        )
        query = query.filter(Build.author_id.in_(author_ids))

    return self.paginate_with_schema(builds_schema, query)
def get(self, job_id):
    """Serialize full detail for one job: previous runs, test-group trees,
    failing tests annotated with their likely origin, and log sources."""
    job = Job.query.options(
        subqueryload_all(Job.phases),
        joinedload('project', innerjoin=True),
    ).get(job_id)
    if job is None:
        return '', 404

    # Most recent finished runs of the same project that predate this job.
    previous_runs = Job.query.filter(
        Job.project == job.project,
        Job.date_created < job.date_created,
        Job.status == Status.finished,
        Job.id != job.id,
    ).order_by(Job.date_created.desc())[:NUM_PREVIOUS_RUNS]

    # find all parent groups (root trees)
    test_groups = sorted(TestGroup.query.filter(
        TestGroup.job_id == job.id,
        TestGroup.parent_id == None,  # NOQA: we have to use == here
    ), key=lambda x: x.name)

    # Leaf-level failures only (num_leaves == 0); count the full set but
    # serialize at most 25 of them.
    test_failures = TestGroup.query.options(
        joinedload('parent'),
    ).filter(
        TestGroup.job_id == job.id,
        TestGroup.result == Result.failed,
        TestGroup.num_leaves == 0,
    ).order_by(TestGroup.name.asc())
    num_test_failures = test_failures.count()
    test_failures = test_failures[:25]

    if test_failures:
        # Annotate each failure with the run it is believed to originate
        # from, so the UI can distinguish new breaks from pre-existing ones.
        failure_origins = find_failure_origins(
            job, test_failures)
        for test_failure in test_failures:
            test_failure.origin = failure_origins.get(test_failure)

    extended_serializers = {
        TestGroup: TestGroupWithOriginSerializer(),
    }

    log_sources = list(LogSource.query.options(
        joinedload('step'),
    ).filter(
        LogSource.job_id == job.id,
    ).order_by(LogSource.date_created.asc()))

    context = self.serialize(job)
    context.update({
        'phases': job.phases,
        'testFailures': {
            'total': num_test_failures,
            'testGroups': self.serialize(test_failures, extended_serializers),
        },
        'logs': log_sources,
        'testGroups': test_groups,
        'previousRuns': previous_runs,
    })

    return self.respond(context)
def get(self, repo: Repository):
    """
    Return a list of builds for the given repository.

    With ``?show=mine`` the listing is restricted to builds authored
    under one of the current user's verified email addresses.
    """
    user = auth.get_current_user()

    query = Build.query.options(
        contains_eager('source'),
        joinedload('source').joinedload('author'),
        joinedload('source').joinedload('revision'),
        joinedload('source').joinedload('patch'),
        subqueryload_all('stats'),
    ).join(
        Source, Build.source_id == Source.id,
    ).filter(
        Build.repository_id == repo.id,
    ).order_by(Build.number.desc())

    show = request.args.get('show')
    if show == 'mine':
        # Match only *verified* emails, consistent with the other
        # build-listing endpoints; previously any unverified address the
        # user claimed would match builds authored by that address.
        query = query.filter(
            Source.author_id.in_(
                db.session.query(Author.id).filter(Author.email.in_(
                    db.session.query(Email.email).filter(
                        Email.user_id == user.id,
                        Email.verified == True,  # NOQA
                    )
                ))
            )
        )

    return self.paginate_with_schema(builds_schema, query)
def get_failure_stats_for_project(self, project, start_period, end_period):
    """Count failing builds in [start_period, end_period) for *project*,
    bucketed into builds with test failures and builds missing tests."""
    stats = {
        'Test Failures': 0,
        'Missing Tests': 0,
    }
    # TODO(dcramer): we should embed this logic into the job/build results
    failing_builds = Build.query.join(
        Source, Source.id == Build.source_id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Build.project_id == project.id,
        Build.status == Status.finished,
        Build.result == Result.failed,
        Build.date_created >= start_period,
        Build.date_created < end_period,
    ).options(
        subqueryload_all('stats'),
    )
    for build in failing_builds:
        stats_by_name = {s.name: s.value for s in build.stats}
        if stats_by_name.get('test_failures', 0):
            stats['Test Failures'] += 1
        if stats_by_name.get('tests_missing', 0):
            stats['Missing Tests'] += 1
    return stats
def create_job(job_id):
    """Kick off execution of a job via the first step of its plan, then
    schedule the follow-up sync task."""
    job = Job.query.get(job_id)
    if not job:
        return

    job_plan = JobPlan.query.options(subqueryload_all('plan.steps')).filter(
        JobPlan.job_id == job.id,
    ).join(Plan).first()

    try:
        if not job_plan:
            raise UnrecoverableException(
                'Got create_job task without job plan: %s' % (job_id, ))
        try:
            first_step = job_plan.plan.steps[0]
        except IndexError:
            raise UnrecoverableException('Missing steps for plan')
        first_step.get_implementation().execute(job=job)
    except UnrecoverableException:
        # Nothing to retry: abort the job and surface the error to operators.
        job.status = Status.finished
        job.result = Result.aborted
        current_app.logger.exception('Unrecoverable exception creating %s',
                                     job_id)
        return

    sync_job.delay(
        job_id=job.id.hex,
        task_id=job.id.hex,
        parent_task_id=job.build_id.hex,
    )
def eager_query(cls):
    """Extend the base Program query with eager-loaded associations."""
    from sqlalchemy import orm
    base = super(Program, cls).eager_query()
    return base.options(
        orm.subqueryload_all('program_directives.directive'),
        orm.subqueryload('cycles'),
    )
def contest_move_index(context, request):
    """An alternate move index, displaying contest data instead of battle
    data.
    """
    supercategories = (
        db.DBSession.query(db.ContestSupercategory)
        .order_by(db.ContestSupercategory.id)
        .options(subqueryload_all('categories.moves'))
        .all()
    )

    pure_points = (
        db.DBSession.query(db.Move)
        .join(db.ContestCategory)
        .filter(db.ContestCategory.identifier == 'pure-points')
        .order_by(db.Move.appeal.desc(), db.Move.jam, db.Move.name)
        .all()
    )
    # Group moves that share the same (appeal, jam) pair; the query above
    # already sorts by that key, as groupby requires.
    grouped = itertools.groupby(pure_points,
                                lambda move: (move.appeal, move.jam))

    return {'supercategories': supercategories,
            'pure_points_moves': grouped}
def get_people(filters, page_size=25):
    """Return (total_count, people) for the leaderboard, applying *filters*
    and capping the listing at *page_size* rows."""
    q = Person.query
    q = add_person_leaderboard_filters(q)
    q = q.options(
        orm.subqueryload_all(
            Person.contributions,
            Contribution.package
        )
    )
    for field, wanted in filters.iteritems():
        if field == "tags":
            # don't do anything for these for now for people
            continue
        if field == "host":
            field = "main_language"
            wanted = make_language(wanted)
        q = q.filter(getattr(Person, field) == wanted)

    total_count = q.count()
    q = q.order_by(Person.impact.desc())
    q = q.limit(page_size)
    return (total_count, q.all())
def create_job(job_id):
    """Launch the given job by executing the first step of its plan."""
    job = Job.query.get(job_id)
    if not job:
        return

    plan_link = JobPlan.query.options(
        subqueryload_all('plan.steps')
    ).filter(
        JobPlan.job_id == job.id,
    ).join(Plan).first()

    try:
        if not plan_link:
            raise UnrecoverableException(
                'Got create_job task without job plan: %s' % (job_id,))

        steps = plan_link.plan.steps
        if not steps:
            raise UnrecoverableException('Missing steps for plan')

        implementation = steps[0].get_implementation()
        implementation.execute(job=job)
    except UnrecoverableException:
        # No retry possible; mark aborted and log the traceback.
        job.status = Status.finished
        job.result = Result.aborted
        current_app.logger.exception('Unrecoverable exception creating %s', job_id)
        return

    sync_job.delay(
        job_id=job.id.hex,
        task_id=job.id.hex,
        parent_task_id=job.build_id.hex,
    )
def get(self, user_id):
    """
    Return a list of builds for the given user.
    """
    if user_id == 'me':
        user = auth.get_current_user()
        if not user:
            return self.error('not authenticated', 401)
    else:
        user = User.query.get(user_id)

    # Builds are matched through the user's verified email addresses.
    verified_emails = db.session.query(Email.email).filter(
        Email.user_id == user.id,
        Email.verified == True,  # NOQA
    )
    author_ids = db.session.query(Author.id).filter(
        Author.email.in_(verified_emails),
    )

    query = Build.query.options(
        joinedload('repository'),
        contains_eager('source'),
        joinedload('source').joinedload('author'),
        joinedload('source').joinedload('revision'),
        joinedload('source').joinedload('patch'),
        subqueryload_all('stats'),
    ).join(
        Source, Build.source_id == Source.id,
    ).filter(
        Source.author_id.in_(author_ids),
    ).order_by(Build.date_created.desc())

    return self.paginate_with_schema(builds_schema, query)
def go():
    """Assert that subquery-loading Engineer.machines through the
    polymorphic employees relationship yields the expected companies."""
    actual = (
        sess.query(Company)
        .options(subqueryload_all(
            Company.employees.of_type(Engineer),
            Engineer.machines))
        .all()
    )
    eq_(actual, expected)
def get_people(filters, page_size=25):
    """Return (total_count, people) for the leaderboard; only people with
    a computed impact score appear in the listing."""
    query = Person.query
    query = add_person_leaderboard_filters(query)
    query = query.options(
        orm.subqueryload_all(
            Person.contributions,
            Contribution.package
        )
    )
    for field, wanted in filters.iteritems():
        if field == "tags":
            # don't do anything for these for now for people
            continue
        if field == "host":
            field = "main_language"
            wanted = make_language(wanted)
        query = query.filter(getattr(Person, field) == wanted)

    # NOTE: the total counts everyone matching the filters; the impact
    # restriction below only trims the returned page.
    total_count = query.count()
    query = query.filter(Person.impact != None)
    query = query.order_by(Person.impact.desc())
    query = query.limit(page_size)
    return (total_count, query.all())
def get(self):
    """
    Return a list of builds.
    """
    # tenants automatically restrict this query but we dont want
    # to include public repos
    tenant = auth.get_current_tenant()
    if not tenant.repository_ids:
        return self.respond([])

    query = (
        Build.query.options(
            joinedload("repository"),
            joinedload("revision"),
            subqueryload_all("revision.authors"),
            subqueryload_all("stats"),
            subqueryload_all("authors"),
        )
        .filter(Build.repository_id.in_(tenant.repository_ids))
        .order_by(Build.date_created.desc())
    )

    requested_user = request.args.get("user")
    if requested_user:
        # "me" resolves to the authenticated user; anything else is an id.
        if requested_user == "me":
            account = auth.get_current_user()
        else:
            account = User.query.get(requested_user)
        if not account:
            return self.respond([])
        query = query.filter(
            Build.authors.any(
                Author.email.in_(
                    db.session.query(Email.email).filter(
                        Email.user_id == account.id,
                        Email.verified == True,  # NOQA
                    )
                )
            )
        )

    repo_name = request.args.get("repository")
    if repo_name:
        repo = Repository.from_full_name(repo_name)
        if not repo:
            return self.respond([])
        query = query.filter(Build.repository_id == repo.id)

    return self.paginate_with_schema(builds_schema, query)
def get(self, job_id):
    """Return the job's phases (with step nodes) serialized via the
    steps-aware phase serializer."""
    job = Job.query.options(
        subqueryload_all(Job.phases),
        joinedload('project', innerjoin=True),
    ).get(job_id)
    if job is None:
        return '', 404

    phases = list(
        JobPhase.query.options(
            subqueryload_all(JobPhase.steps, JobStep.node),
        ).filter(
            JobPhase.job_id == job.id,
        ).order_by(
            JobPhase.date_started.asc(),
            JobPhase.date_created.asc(),
        )
    )

    return self.respond(self.serialize(phases, {
        JobPhase: JobPhaseWithStepsSerializer(),
    }))
def _lookup_asset(self, asset_id):
    """Fetch an Asset (with its gateway links eagerly loaded) by id.

    Returns None when no such asset exists. The session is always
    released, even if the query raises.
    """
    session = Session()
    try:
        return session.query(Asset)\
            .filter(Asset.id == asset_id)\
            .options(subqueryload_all(Asset.gateway_assets,
                                      GatewayAsset.gateway))\
            .first()
    finally:
        # Previously the session leaked whenever the query raised;
        # close it unconditionally.
        session.close()
def get_idruns(self, idruns, bqs):
    """Function accepts a list of idruns of the format \d{4}[A-F]_\d{2}
    (4 digits followed by a letter A through F *underscore* minute number).
    Returns a list of IDrun objects with loaded adducts and isotopes.
    """
    query = (
        self.session.query(dbtables.Adduct)
        .join(dbtables.IDrun)
        .filter(dbtables.IDrun.name.in_(idruns))
        .filter(dbtables.Adduct.bq.in_(bqs))
        .options(subqueryload_all('isotopes'))
    )
    return query.all()
def fetch_builds_for_revisions(
    revisions: List[Revision], with_relations=True
) -> List[Tuple[Tuple[UUID, str], MetaBuild]]:
    """For each (repository_id, sha) with at least one build, return that
    key paired with the merged meta-build of all its builds."""
    # we query extra builds here, but its a lot easier than trying to get
    # sqlalchemy to do a ``select (subquery)`` clause and maintain tenant
    # constraints
    if not revisions:
        return []

    # One OR-term per revision; a build matches when repo AND sha agree.
    lookups = []
    for revision in revisions:
        lookups.append(
            and_(
                Build.repository_id == revision.repository_id,
                Build.revision_sha == revision.sha,
            )
        )

    base_qs = Build.query
    if with_relations:
        base_qs = base_qs.options(
            joinedload("revision"),
            subqueryload_all("authors"),
            subqueryload_all("revision.authors"),
            subqueryload_all("stats"),
        )

    # NOTE(review): groupby below keys on (repository_id, revision_sha)
    # but the ordering is by revision_sha alone — presumably two repos
    # never share a sha within one call; confirm, else groups may split.
    builds = list(
        (base_qs.filter(reduce(or_, lookups)).order_by(Build.revision_sha))  # NOQA
    )
    build_groups = groupby(
        builds, lambda build: (build.repository_id, build.revision_sha)
    )
    # Hook ids are unioned across *all* fetched builds and shared with
    # every merge below.
    required_hook_ids: Set[str] = set()
    for build in builds:
        required_hook_ids.update(build.data.get("required_hook_ids") or ())
    return [
        (
            ident,
            merge_build_group(
                list(build_group), required_hook_ids, with_relations=with_relations
            ),
        )
        for ident, build_group in build_groups
    ]
def get(self, job_id):
    """Serialize a job's phases and steps, optionally annotating each step
    with its failed-test count, plus per-command-type durations."""
    # ?test_counts=... toggles the extra failed-test aggregation below.
    get_test_counts = request.args.get('test_counts', False)

    job = Job.query.options(
        subqueryload_all(Job.phases),
        joinedload('project', innerjoin=True),
    ).get(job_id)
    if job is None:
        return '', 404

    phase_list = list(JobPhase.query.options(
        subqueryload_all(JobPhase.steps, JobStep.node),
        subqueryload_all(JobPhase.steps, JobStep.logsources)
    ).filter(
        JobPhase.job_id == job.id,
    ).order_by(JobPhase.date_started.asc(), JobPhase.date_created.asc()))

    # step_id -> number of failed test cases in that step.
    test_counts = {}
    if get_test_counts:
        rows = list(db.session.query(
            TestCase.step_id, func.count()
        ).filter(
            TestCase.job_id == job.id,
            TestCase.result == Result.failed,
        ).group_by(TestCase.step_id))
        for row in rows:
            test_counts[row[0]] = row[1]

    # Serialize log sources without their step breadcrumb to keep payloads
    # small (the step context is already present in the surrounding data).
    logsource_registry = {LogSource: LogSourceWithoutStepCrumbler()}

    context = []
    for phase, phase_data in zip(phase_list, self.serialize(phase_list)):
        phase_data['steps'] = []
        for step, step_data in zip(phase.steps, self.serialize(list(phase.steps))):
            step_data['logSources'] = self.serialize(
                list(step.logsources),
                extended_registry=logsource_registry)
            if step.id in test_counts:
                step_data['testFailures'] = test_counts[step.id]
            # Sum command durations per command type; commands still
            # running (duration None) are excluded.
            step_data['commandTypeDurations'] = defaultdict(int)
            for command in step.commands:
                if command.duration is not None:
                    step_data['commandTypeDurations'][command.type.name] += command.duration
            phase_data['steps'].append(step_data)
        context.append(phase_data)

    return self.respond(context, serialize=False)
def eager_query(cls):
    """Extend the base Directive query with eager-loaded associations."""
    from sqlalchemy import orm
    base = super(Directive, cls).eager_query()
    eager_opts = (
        orm.joinedload('audit_frequency'),
        orm.joinedload('audit_duration'),
        orm.subqueryload('controls'),
        orm.subqueryload_all('program_directives.program'),
        orm.subqueryload('sections'),
    )
    return base.options(*eager_opts)
def get(self, project_id):
    """Serialize project detail: plans, last build, last passing build,
    repository, merged options and stats."""
    project = Project.get(project_id)
    if project is None:
        return '', 404

    plans = Plan.query.options(
        subqueryload_all(Plan.steps),
    ).filter(
        Plan.projects.contains(project),
    )

    # Newest finished, unpatched build for the project.
    last_build = Build.query.options(
        joinedload('author'),
        contains_eager('source')
    ).join(
        Source, Build.source_id == Source.id,
    ).filter(
        Source.patch_id == None,  # NOQA
        Build.project == project,
        Build.status == Status.finished,
    ).order_by(
        Build.date_created.desc(),
    ).first()
    if not last_build or last_build.result == Result.passed:
        # The latest build already passed (or none exists); reuse it
        # instead of issuing a second query.
        last_passing_build = last_build
    else:
        last_passing_build = Build.query.options(
            joinedload('author'),
            contains_eager('source')
        ).join(
            Source, Build.source_id == Source.id,
        ).filter(
            Source.patch_id == None,  # NOQA
            Build.project == project,
            Build.result == Result.passed,
            Build.status == Status.finished,
        ).order_by(
            Build.date_created.desc(),
        ).first()

    # Stored options, backfilled with defaults for any missing keys.
    options = dict(
        (o.name, o.value)
        for o in ProjectOption.query.filter(
            ProjectOption.project_id == project.id,
        )
    )
    for key, value in OPTION_DEFAULTS.iteritems():
        options.setdefault(key, value)

    data = self.serialize(project)
    data['lastBuild'] = last_build
    data['lastPassingBuild'] = last_passing_build
    data['repository'] = project.repository
    data['plans'] = list(plans)
    data['options'] = options
    data['stats'] = self._get_stats(project)

    return self.respond(data)
def test_from_aliased(self):
    """Verify subqueryload / subqueryload_all work when the query root is
    an aliased() entity, pinning the exact number of SQL statements."""
    users, Dingaling, User, dingalings, Address, addresses = (self.tables.users,
                            self.classes.Dingaling,
                            self.classes.User,
                            self.tables.dingalings,
                            self.classes.Address,
                            self.tables.addresses)

    mapper(Dingaling, dingalings)
    mapper(Address, addresses, properties={
        'dingalings':relationship(Dingaling, order_by=Dingaling.id)
    })
    mapper(User, users, properties={
        'addresses':relationship(
                        Address,
                        order_by=Address.id)
    })
    sess = create_session()

    u = aliased(User)

    q = sess.query(u).options(subqueryload(u.addresses))

    # One statement for the users row, one for the addresses subquery load.
    def go():
        eq_(
            [User(id=7, addresses=[
                Address(id=1, email_address='*****@*****.**')])],
            q.filter(u.id==7).all()
        )
    self.assert_sql_count(testing.db, go, 2)

    # Same two-statement count when loading the full result set.
    def go():
        eq_(
            self.static.user_address_result,
            q.order_by(u.id).all()
        )
    self.assert_sql_count(testing.db, go, 2)

    q = sess.query(u).\
            options(subqueryload_all(u.addresses, Address.dingalings))

    # Two-level eager load: users + addresses + dingalings = 3 statements.
    def go():
        eq_(
            [
                User(id=8, addresses=[
                    Address(id=2, email_address='*****@*****.**', dingalings=[Dingaling()]),
                    Address(id=3, email_address='*****@*****.**'),
                    Address(id=4, email_address='*****@*****.**'),
                ]),
                User(id=9, addresses=[
                    Address(id=5, dingalings=[Dingaling()])
                ]),
            ],
            q.filter(u.id.in_([8, 9])).all()
        )
    self.assert_sql_count(testing.db, go, 3)
def person_load_options():
    """Eager-load options for Person queries: each person's contributions,
    their packages, and the packages' own contributions."""
    from models.person import Person
    from models.contribution import Contribution
    from models.package import Package

    return orm.subqueryload_all(
        Person.contributions,
        Contribution.package,
        Package.contributions
        # Contribution.person,
        # Person.contributions
    )
def get(self, build: Build):
    """
    Return bundle stats for a given build.
    """
    # Restrict bundles to the jobs belonging to this build.
    job_ids = db.session.query(Job.id).filter(
        Job.build_id == build.id
    ).subquery()

    bundles = (
        Bundle.query.filter(Bundle.job_id.in_(job_ids))
        .options(subqueryload_all(Bundle.assets))
        .order_by(Bundle.name.asc())
    )
    return self.paginate_with_schema(bundle_schema, bundles)
def get(self):
    """
    Return a list of builds.
    """
    recent_builds = (
        Build.query.options(
            joinedload('source').joinedload('author'),
            joinedload('source').joinedload('revision'),
            joinedload('source').joinedload('patch'),
            subqueryload_all('stats'),
        )
        .order_by(Build.date_created.desc())
        .limit(100)
    )
    return self.respond_with_schema(builds_schema, recent_builds)
def go():
    """Verify that 'children.children' subquery loading returns the n1
    subtree fully populated."""
    loaded = sess.query(Node).filter_by(data='n1').\
        options(subqueryload_all('children.children')).first()
    eq_(
        Node(data='n1', children=[
            Node(data='n11'),
            Node(data='n12', children=[
                Node(data='n121'),
                Node(data='n122'),
                Node(data='n123'),
            ]),
            Node(data='n13'),
        ]),
        loaded,
    )
def transaction(request, info, session, transid):
    """Render the detail page for a single till transaction."""
    txn = (
        session.query(Transaction)
        .options(
            subqueryload_all('payments'),
            joinedload('lines.department'),
            joinedload('lines.user'),
            undefer('total'),
            undefer('discount_total'),
        )
        .get(int(transid))
    )
    if not txn:
        raise Http404
    return ('transaction.html', {'transaction': txn, 'tillobject': txn})
def get(self, project: Project):
    """
    Return a list of builds for the given project.
    """
    builds = (
        Build.query.options(
            joinedload('source').joinedload('author'),
            joinedload('source').joinedload('revision'),
            joinedload('source').joinedload('patch'),
            subqueryload_all('stats'),
        )
        .filter(Build.project_id == project.id)
        .order_by(Build.number.desc())
        .limit(100)
    )
    return self.respond_with_schema(builds_schema, builds)
def get(self, job_id):
    """Serialize a job's phases and steps, optionally annotating each step
    with its count of failed test cases."""
    # ?test_counts=... toggles the extra failed-test aggregation below.
    get_test_counts = request.args.get('test_counts', False)

    job = Job.query.options(
        subqueryload_all(Job.phases),
        joinedload('project', innerjoin=True),
    ).get(job_id)
    if job is None:
        return '', 404

    phase_list = list(JobPhase.query.options(
        subqueryload_all(JobPhase.steps, JobStep.node),
        subqueryload_all(JobPhase.steps, JobStep.logsources)
    ).filter(
        JobPhase.job_id == job.id,
    ).order_by(JobPhase.date_started.asc(), JobPhase.date_created.asc()))

    # step_id -> number of failed test cases in that step.
    test_counts = {}
    if get_test_counts:
        rows = list(db.session.query(
            TestCase.step_id, func.count()
        ).filter(
            TestCase.job_id == job.id,
            TestCase.result == Result.failed,
        ).group_by(TestCase.step_id))
        for row in rows:
            test_counts[row[0]] = row[1]

    context = []
    for phase, phase_data in zip(phase_list, self.serialize(phase_list)):
        phase_data['steps'] = []
        for step, step_data in zip(phase.steps, self.serialize(list(phase.steps))):
            step_data['logSources'] = self.serialize(list(step.logsources))
            if step.id in test_counts:
                step_data['testFailures'] = test_counts[step.id]
            phase_data['steps'].append(step_data)
        context.append(phase_data)

    return self.respond(context, serialize=False)
def test_loads_second_level_collection_to_collection(self):
    """A subqueryload_all over two collection levels must populate 'items'
    on persistent orders but leave a freshly-added pending order alone."""
    User, Order, Item, sess = self._collection_to_collection_fixture()

    u1 = sess.query(User).get(7)
    # Touch the collection so it is loaded before we mutate it.
    u1.orders
    o1 = Order()
    u1.orders.append(o1)
    sess.query(User).options(subqueryload_all("orders.items")).filter_by(id=7).all()
    for o in u1.orders:
        if o is not o1:
            # Persistent orders: items were eagerly loaded.
            assert "items" in o.__dict__
        else:
            # The pending order was never in the database; no eager load.
            assert "items" not in o.__dict__
def go():
    """Check that subquery-loading two levels of children yields the full
    n1 subtree."""
    node = (
        sess.query(Node)
        .filter_by(data="n1")
        .options(subqueryload_all("children.children"))
        .first()
    )
    eq_(
        Node(
            data="n1",
            children=[
                Node(data="n11"),
                Node(
                    data="n12",
                    children=[
                        Node(data="n121"),
                        Node(data="n122"),
                        Node(data="n123"),
                    ],
                ),
                Node(data="n13"),
            ],
        ),
        node,
    )
def get(self, test_id):
    """Return a serialized test case with its artifacts, messages, step
    and the step's first log source."""
    testcase = TestCase.query.options(
        subqueryload_all('artifacts'),
        subqueryload_all('messages'),
    ).get(test_id)
    if testcase is None:
        return '', 404

    context = self.serialize(testcase)
    context['message'] = xunit.get_testcase_messages(testcase)
    context['step'] = self.serialize(testcase.step)
    context['artifacts'] = self.serialize(testcase.artifacts)
    # XXX(dcramer): we assume one log per step
    context['logSource'] = self.serialize(LogSource.query.filter(
        LogSource.step_id == testcase.step_id,
    ).first())
    return self.respond(context)