def get_list_requests(user_id):
    """Return an ``ok`` response listing the user's dated requests with status text."""
    db = current_app.config["database"]
    query = (
        db.session.query(Request, RequestStatus)
        .join(RequestStatus, Request.request_status_id == RequestStatus.id)
        .filter(Request.user_id == user_id)
        .filter(Request.date.isnot(None))
        .options(
            # Only the columns actually serialized below are selected.
            Load(Request).load_only("id", "date", "telegraph"),
            Load(RequestStatus).load_only("text"),
        )
    )
    payload = [
        {
            "id": req.id,
            "date": req.date,
            "telegraph": req.telegraph,
            "status": status.text,
        }
        for req, status in query
    ]
    return ok(payload)
def _query_select_options(self, query, select_columns=None):
    """
    Add select load options to query.
    The goal is to only SQL select what is requested

    :param query: SQLAlchemy Query obj
    :param select_columns: (list) of columns
    :return: SQLAlchemy Query obj
    """
    if select_columns:
        _load_options = []
        for column in select_columns:
            if "." in column:
                # Split once: relation name and leaf attribute (previously
                # the split was computed twice per column).
                parts = column.split(".")
                model_relation = self.get_related_model(parts[0])
                if not self.is_model_already_joinded(query, model_relation):
                    query = query.join(model_relation)
                _load_options.append(
                    Load(model_relation).load_only(parts[1])
                )
            else:
                # Plain column: restrict loading only for non-relation,
                # non-callable attributes; otherwise keep the bare Load.
                if not self.is_relation(column) and not callable(
                    getattr(self.obj, column)
                ):
                    _load_options.append(Load(self.obj).load_only(column))
                else:
                    _load_options.append(Load(self.obj))
        query = query.options(*_load_options)
    return query
def get_descriptions(self, code, with_pdb):
    """Return 'pfamseq_id/start-end' region strings for the Pfam family
    matching *code* (exact accession, upper-cased, or case-insensitive id).
    When *with_pdb* is true, restrict to sequences with a PDB mapping."""
    #icode = "%{:}%".format(code)
    # Resolve the family accession from either accession or family id.
    subquery = scoped_db.query(PfamA.pfamA_acc)
    subquery = subquery.filter(
        or_(PfamA.pfamA_acc == code.upper(),
            PfamA.pfamA_id.ilike(code))).distinct().subquery()
    query = scoped_db.query(
        concat(Pfamseq.pfamseq_id, '/',
               cast(PfamARegFullSignificant.seq_start, types.Unicode), '-',
               cast(PfamARegFullSignificant.seq_end, types.Unicode)))
    # Join regions that are part of the full alignment only.
    query = query.outerjoin(
        PfamARegFullSignificant,
        and_(Pfamseq.pfamseq_acc == PfamARegFullSignificant.pfamseq_acc,
             PfamARegFullSignificant.in_full == 1))
    query = query.filter(
        PfamARegFullSignificant.pfamA_acc == subquery.c.pfamA_acc)
    if with_pdb:
        # Keep only sequences that also appear in the PDB region mapping.
        subquery2 = scoped_db.query(PdbPfamAReg)
        subquery2 = subquery2.filter(PdbPfamAReg.pfamA_acc == subquery.c.
                                     pfamA_acc).distinct().subquery()
        query = query.filter(
            PfamARegFullSignificant.pfamseq_acc == subquery2.c.pfamseq_acc)
    # Trim selected columns to what the concat expression needs.
    query = query.options(
        Load(Pfamseq).load_only('pfamseq_id'),
        Load(PfamARegFullSignificant).load_only("seq_start", "seq_end"))
    query = query.order_by(Pfamseq.pfamseq_id.asc())
    return query.distinct().all()
def get_sequence_descriptions_from_pfam_without_join_table(pfam, with_pdb):
    """Return 'pfamseq_id/start-end' strings for sequences in *pfam*.

    Joins Pfamseq directly to significant full-alignment regions (no join
    table); optionally restricts to sequences with a PDB region mapping.
    """
    pfam_acc_sq = get_pfam_acc_from_pfam(pfam).distinct().subquery()
    region_label = concat(
        Pfamseq.pfamseq_id, '/',
        cast(PfamARegFullSignificant.seq_start, types.Unicode), '-',
        cast(PfamARegFullSignificant.seq_end, types.Unicode))
    query = (
        db.session.query(region_label)
        .join(PfamARegFullSignificant,
              Pfamseq.pfamseq_acc == PfamARegFullSignificant.pfamseq_acc)
        .filter(PfamARegFullSignificant.pfamA_acc == pfam_acc_sq.c.pfamA_acc)
    )
    if with_pdb:
        # Keep only sequences that also appear in the PDB region mapping.
        pdb_sq = (
            db.session.query(PdbPfamAReg)
            .filter(PdbPfamAReg.pfamA_acc == pfam_acc_sq.c.pfamA_acc)
            .distinct()
            .subquery()
        )
        query = query.filter(
            PfamARegFullSignificant.pfamseq_acc == pdb_sq.c.pfamseq_acc)
    query = (
        query.filter(PfamARegFullSignificant.in_full)
        .options(
            Load(Pfamseq).load_only('pfamseq_id'),
            Load(PfamARegFullSignificant).load_only("seq_start", "seq_end"))
        .order_by(Pfamseq.pfamseq_id.asc())
        .distinct()
    )
    return [row[0] for row in query.all()]
def index():
    """Render the home page with the ten most recently created artists and venues."""
    recent_artists = (
        db.session.query(Artist)
        .options(Load(Artist).load_only('name', "created_at"))
        .order_by(Artist.created_at.desc())
        .limit(10)
        .all()
    )
    recent_venues = (
        db.session.query(Venue)
        .options(Load(Venue).load_only('name', "created_at"))
        .order_by(Venue.created_at.desc())
        .limit(10)
        .all()
    )

    def as_summary(kind):
        # Build a serializer that tags each model with its type label.
        def serialize(model):
            return {
                "type": kind,
                "id": model.id,
                "name": model.name,
                "created_at": model.created_at,
            }
        return serialize

    artists = [as_summary('artist')(model) for model in recent_artists]
    venues = [as_summary('venue')(model) for model in recent_venues]
    combined = venues + artists
    latest = sorted(combined, key=lambda item: item['id'], reverse=True)[:10]
    return render_template('pages/home.html', latest=latest)
def get_employee_with_dept(db: Session):
    """Return a list of dicts with employee name/id/email and department name.

    :param db: active SQLAlchemy session
    :return: list of dicts with keys 'name', 'id', 'email', 'dept_name'

    Bug fix: the query selects (Employee, Department) pairs, so each result
    row is a tuple, not an Employee — the previous loop read ``employee.name``
    directly off the row and ``employee.department.dept_name`` (which would
    also trigger a lazy load, defeating the Department load_only). We now
    unpack the tuple and use the already-joined Department.
    """
    rows = (
        db.query(models.Employee, models.Department)
        .join(models.Employee.department)
        .options(
            Load(models.Employee).load_only("name", "email"),
            Load(models.Department).load_only("dept_name"),
        )
        .all()
    )
    return [
        {
            'name': employee.name,
            'id': employee.id,
            'email': employee.email,
            'dept_name': department.dept_name,
        }
        for employee, department in rows
    ]
def _query_select_options(self, query, select_columns=None):
    """
    Add select load options to query.
    The goal is to only SQL select what is requested

    :param query: SQLAlchemy Query obj
    :param select_columns: (list) of columns
    :return: SQLAlchemy Query obj
    """
    if not select_columns:
        return query
    load_options = []
    for column in select_columns:
        query, relation_tuple = self._query_join_dotted_column(query, column)
        model_relation, relation_join = relation_tuple or (None, None)
        if model_relation:
            # Dotted column resolved to a relation: load only the leaf.
            load_options.append(
                Load(model_relation).load_only(column.split(".")[1]))
            continue
        attribute = getattr(self.obj, column)
        if hasattr(attribute, "fget"):
            # Custom property method field: nothing to load explicitly.
            pass
        elif not self.is_relation(column) and not hasattr(
                attribute, "__call__"):
            # Plain column: restrict loading to it.
            load_options.append(Load(self.obj).load_only(column))
        else:
            # Relation or callable: keep the bare Load.
            load_options.append(Load(self.obj))
    return query.options(*tuple(load_options))
def get(self, problem_id: int):
    """Return a paginated JSON list of runs for *problem_id*."""
    args = parser.parse(get_args, request)
    query = self._build_query_by_args(args, problem_id)
    query = query.options(
        Load(Problem).load_only('id', 'name'),
        Load(SimpleUser).load_only('id', 'firstname', 'lastname'),
    )
    result = query.paginate(page=args.get('page'),
                            per_page=args.get('count'),
                            error_out=False,
                            max_per_page=100)
    runs = []
    for run, user, problem in result.items:
        # Attach the joined entities so the schema serializes them inline.
        run.user = user
        run.problem = problem
        runs.append(run)
    data = RunSchema(many=True).dump(runs)
    return flask_jsonify({
        'result': 'success',
        'data': data.data,
        'metadata': {'count': result.total, 'page_count': result.pages},
    })
def get(self, course_module_id):
    """Serialize the contest (Statement) behind a visible course module."""
    course_module = (
        db.session.query(CourseModule)
        .filter(CourseModule.id == course_module_id)
        .filter(CourseModule.visible == 1)
        .one_or_none()
    )
    if course_module is None:
        raise NotFound(f'Cannot find course module id #{course_module_id}')
    contest = course_module.instance
    if not isinstance(contest, Statement):
        raise BadRequest('This resource is not implemented yet')
    pairs = (
        db.session.query(Problem, StatementProblem)
        .join(StatementProblem, StatementProblem.problem_id == Problem.id)
        .filter(StatementProblem.statement_id == contest.id)
        .filter(StatementProblem.hidden == 0)
        .options(Load(Problem).load_only('id', 'name'))
        .options(Load(StatementProblem).load_only('rank'))
    )
    # Copy each rank onto its problem so one schema serializes both.
    problems = []
    for problem, statement_problem in pairs.all():
        problem.rank = statement_problem.rank
        problems.append(problem)
    contest.problems = problems
    response = ContestSchema().dump(contest)
    return jsonify(response.data)
def get_vacancies(hh_vac, hh_dom=None):
    """Return vacancies as a list of dicts limited to the requested fields.

    :param hh_vac: list of hh_vacancy column names to load
    :param hh_dom: optional list of hh_domain column names; when given,
        the domain's fields are merged into each vacancy dict
    :return: list of dicts containing the selected fields

    Bug fix: the previous version used the ORM objects' live ``__dict__``
    directly, merged attributes into it and popped ``_sa_instance_state``
    from it — mutating (and corrupting) the instrumented instances. We
    now build a fresh dict per row and pop from the copy.
    """
    results = []
    if hh_dom is not None:
        rows = (
            Session.query(hh_vacancy, hh_domain).join(hh_domain)
            .options(
                Load(hh_vacancy).load_only(*hh_vac),
                Load(hh_domain).load_only(*hh_dom),
            )
            .all()
        )
        for vacancy, domain in rows:
            merged = dict(vacancy.__dict__)
            merged.update(domain.__dict__)
            merged.pop("_sa_instance_state", None)
            results.append(merged)
    else:
        rows = (
            Session.query(hh_vacancy).join(hh_domain)
            .options(Load(hh_vacancy).load_only(*hh_vac))
            .all()
        )
        for vacancy in rows:
            merged = dict(vacancy.__dict__)
            merged.pop("_sa_instance_state", None)
            results.append(merged)
    return results  # list of dicts with the selected fields
def test_load_only_parent_specific(self):
    """Per-entity load_only options applied to a three-entity query."""
    User = self.classes.User
    Address = self.classes.Address
    Order = self.classes.Order

    mapper(User, self.tables.users)
    mapper(Address, self.tables.addresses)
    mapper(Order, self.tables.orders)

    sess = create_session()
    q = sess.query(User, Order, Address).options(
        Load(User).load_only("name"),
        Load(Order).load_only("id"),
        Load(Address).load_only("id", "email_address"),
    )

    # Only the load_only columns (plus primary keys) appear in the SELECT.
    self.assert_compile(
        q,
        "SELECT users.id AS users_id, "
        "users.name AS users_name, "
        "orders.id AS orders_id, "
        "addresses.id AS addresses_id, "
        "addresses.email_address AS addresses_email_address "
        "FROM users, orders, addresses")
def supplement_halo_query(self, halo_query, halo_alias=None):
    """Return a sqlalchemy query with a supplemental join to allow this calculation to run efficiently

    halo_query: The query that returns the simulation objects on which
        calculations are going to be made
    halo_alias: The alias for the simulation object class being referenced
        (or None to use SimulationObjectBase)"""
    name_targets = self.retrieves_dict_ids()
    if halo_alias is None:
        halo_alias = tangos.core.halo.SimulationObjectBase
    augmented_query = halo_query
    order_bys = []
    # NOTE(review): current_join_path is assigned but never used below.
    current_join_path = None
    for i in range(self.n_join_levels()):
        # Fresh aliases per join level so each level targets distinct rows.
        halo_property_alias = aliased(tangos.core.halo_data.HaloProperty)
        halo_link_alias = aliased(tangos.core.halo_data.HaloLink)
        if len(name_targets) > 0:
            property_name_condition = halo_property_alias.name_id.in_(
                name_targets)
            link_name_condition = (
                halo_link_alias.relation_id.in_(name_targets))
        else:
            # We know we're joining to a null list of properties; however simply setting these conditions
            # to False results in an apparently efficient SQL query (boils down to 0==1) which actually
            # takes a very long time to execute if the link or propery tables are large. Thus, compare
            # to an impossible value instead.
            property_name_condition = halo_property_alias.name_id == -1
            link_name_condition = halo_link_alias.relation_id == -1
        # Outer-join properties and links hanging off the current halo alias.
        augmented_query = augmented_query.outerjoin(
            halo_property_alias,
            (halo_alias.id == halo_property_alias.halo_id)
            & property_name_condition).\
            outerjoin(
                halo_link_alias,
                (halo_alias.id == halo_link_alias.halo_from_id)
                & link_name_condition)
        # Populate the relationships from the joins just made, and undefer
        # all property columns.
        augmented_query = augmented_query.options(
            Load(halo_alias).contains_eager(
                halo_alias.all_properties,
                alias=halo_property_alias).undefer("*"),
            Load(halo_alias).contains_eager(halo_alias.all_links,
                                            alias=halo_link_alias),
        )
        order_bys += [halo_link_alias.id, halo_property_alias.id]
        if i < self.n_join_levels() - 1:
            # Chain to the next halo level through the link's target object.
            next_level_halo_alias = aliased(
                tangos.core.halo.SimulationObjectBase)
            augmented_query = augmented_query.outerjoin(
                next_level_halo_alias,
                (halo_link_alias.halo_to_id == next_level_halo_alias.id)).\
                options(Load(halo_link_alias).
                        contains_eager(halo_link_alias.halo_to,
                                       alias=next_level_halo_alias))
            halo_alias = next_level_halo_alias
    augmented_query = augmented_query.order_by(*order_bys)
    return augmented_query
def process(self, problem_id: int, user_ids):
    """Paginate runs for a problem, batch-attach users/problems, serialize."""
    args = parser.parse(get_args, request)
    query = self._build_query_by_args(args, problem_id, user_ids)
    per_page_count = args.get('count')
    page = args.get('page')
    result = query.paginate(page=page,
                            per_page=per_page_count,
                            error_out=False,
                            max_per_page=100)
    runs = []
    problem_ids = set()
    # NOTE(review): this shadows the user_ids parameter from here onward.
    user_ids = set()
    for run in result.items:
        problem_ids.add(run.problem_id)
        user_ids.add(run.user_id)
    # Batch-load the referenced problems and users in two queries
    # (instead of one lazy load per run).
    problems_result = db.session.query(Problem).filter(
        Problem.id.in_(problem_ids)).options(
            Load(Problem).load_only('id', 'name'))
    problems = dict()
    for problem in problems_result:
        problems[problem.id] = problem
    users_result = db.session.query(SimpleUser).filter(
        SimpleUser.id.in_(user_ids)).options(
            Load(SimpleUser).load_only('id', 'firstname', 'lastname'))
    users = dict()
    for u in users_result:
        users[u.id] = u
    for run in result.items:
        # user_id <= 0 appears to mean "no real user"; attach only when a
        # matching user row was found.
        if run.user_id > 0:
            if run.user_id in users:
                run.user = users[run.user_id]
        run.problem = problems[run.problem_id]
        if args.get('include_source'):
            run.code = base64.b64encode(run.source)
        runs.append(run)
    metadata = {'count': result.total, 'page_count': result.pages}
    schema = RunSchema(many=True)
    data = schema.dump(runs)
    return flask_jsonify({
        'result': 'success',
        'data': data.data,
        'metadata': metadata
    })
def get_entries_commits(full_base_query):
    """
    Takes a base query and only selects commit relevant data.

    Defers every Vulnerability and Nvd column and eagerly joins the
    commits relationship, then executes the query.
    :param full_base_query:
    :return:
    """
    narrowed = full_base_query.options(
        Load(Vulnerability).defer('*'),
        Load(Nvd).defer('*'),
        joinedload(Vulnerability.commits),
    )
    return narrowed.all()
def sip_lines_for_device(device_id):
    """Fetch (LineFeatures, UserSIP, Extension) rows for one device's SIP lines."""
    query = (
        Session.query(LineFeatures, UserSIP, Extension)
        .join(LineFeatures.endpoint_sip)
        .join(LineFeatures.user_lines)
        .join(UserLine.main_user_rel)
        .join(LineFeatures.line_extensions)
        .join(LineExtension.main_extension_rel)
        .filter(LineFeatures.device == device_id)
        .options(
            # Select only the columns the provisioning config needs.
            Load(LineFeatures).load_only("id", "configregistrar"),
            Load(UserSIP).load_only("id", "callerid", "name", "secret"),
            Load(Extension).load_only("id", "exten"),
        )
    )
    return query.all()
def get_row(self, trunk_id):
    """Load one trunk with its (possibly absent) SIP/IAX/custom endpoints."""
    query = (
        self.session.query(TrunkFeatures, UserIAX, UserCustom)
        .outerjoin(TrunkFeatures.endpoint_sip)
        .outerjoin(TrunkFeatures.endpoint_iax)
        .outerjoin(TrunkFeatures.endpoint_custom)
        .options(
            Load(TrunkFeatures).load_only("id", "context"),
            Load(UserIAX).load_only("id", "category", "context"),
            Load(UserCustom).load_only("id", "category", "context"),
        )
        .filter(TrunkFeatures.id == trunk_id)
    )
    return query.first()
def test_query_expr(self):
    """Cache keys for with_expression, in unbound and Load-bound forms."""
    (User, ) = self.classes("User")

    def option_fixtures():
        # Fresh option objects on every call, as the fixture requires.
        return (
            with_expression(User.name, true()),
            with_expression(User.name, null()),
            with_expression(User.name, func.foobar()),
            with_expression(User.name, User.name == "test"),
            Load(User).with_expression(User.name, true()),
            Load(User).with_expression(User.name, null()),
            Load(User).with_expression(User.name, func.foobar()),
            Load(User).with_expression(User.name, User.name == "test"),
        )

    self._run_cache_key_fixture(option_fixtures, compare_values=True)
def manage_project_releases(project, request):
    """Summarize per-release file counts for *project*, grouped by package type.

    Example: rows [('0.1', 'bdist_wheel', 2), ('0.1', 'sdist', 1)] become
    {'0.1': {'bdist_wheel': 2, 'sdist': 1, 'total': 3}}.
    """
    # Counts for all files of this project, grouped by release version
    # and package type (outer join keeps file-less releases).
    filecounts = (
        request.db.query(Release.version, File.packagetype, func.count(File.id))
        .options(Load(Release).load_only("version"))
        .outerjoin(File)
        .group_by(Release.id)
        .group_by(File.packagetype)
        .filter(Release.project == project)
        .all()
    )
    version_to_file_counts = {}
    for version, packagetype, count in filecounts:
        counts = version_to_file_counts.setdefault(version, {})
        counts.setdefault("total", 0)
        counts[packagetype] = count
        counts["total"] += count
    return {
        "project": project,
        "version_to_file_counts": version_to_file_counts,
    }
def migrate_roles(
    session: Session,
    pvm_key_map: PvmMigrationMapType,
    commit: bool = False,
) -> None:
    """
    Migrates all existing roles that have the permissions to be migrated

    :param session: active SQLAlchemy session
    :param pvm_key_map: mapping from old (view, permission) keys to new ones
    :param commit: when True, commit the session at the end
    """
    # Collect a map of PermissionView objects for migration
    pvm_map: Dict[PermissionView, List[PermissionView]] = {}
    for old_pvm_key, new_pvms_ in pvm_key_map.items():
        old_pvm = _find_pvm(session, old_pvm_key.view, old_pvm_key.permission)
        if old_pvm:
            for new_pvm_key in new_pvms_:
                new_pvm = _find_pvm(session, new_pvm_key.view, new_pvm_key.permission)
                # setdefault replaces the manual create-list-or-append branch.
                pvm_map.setdefault(old_pvm, []).append(new_pvm)

    # Replace old permissions by the new ones on all existing roles
    roles = session.query(Role).options(Load(Role).joinedload(Role.permissions)).all()
    for role in roles:
        for old_pvm, new_pvms in pvm_map.items():
            if old_pvm in role.permissions:
                # Lazy %-style args avoid formatting when INFO is disabled.
                logger.info("Removing %s from %s", old_pvm, role)
                role.permissions.remove(old_pvm)
                for new_pvm in new_pvms:
                    if new_pvm not in role.permissions:
                        logger.info("Add %s to %s", new_pvm, role)
                        role.permissions.append(new_pvm)
        session.merge(role)

    # Delete old permissions
    _delete_old_permissions(session, pvm_map)
    if commit:
        session.commit()
def test_query_opts_key_bound_branching(self):
    """Profile processing several bound loader options that branch off a
    single base ``Load(A).joinedload(A.bs)`` against a compile state."""
    A, B, C, D, E, F, G = self.classes("A", "B", "C", "D", "E", "F", "G")

    base = Load(A).joinedload(A.bs)
    # Four branches hanging off the same bound base option.
    opts = [
        base.joinedload(B.cs),
        base.joinedload(B.ds),
        base.joinedload(B.es),
        base.joinedload(B.fs),
    ]

    q = fixture_session().query(A)
    context = q._compile_state()

    @profiling.function_call_count(warmup=1)
    def go():
        q2 = q.options(opts)
        # Re-point the compile state at the new query and reset the
        # attribute dict the unbound-load dedupe logic writes into.
        context.query = q2
        context.attributes = q2._attributes = {
            "_unbound_load_dedupes": set()
        }
        for opt in q2._with_options:
            opt.process_compile_state(context)

    go()
def find_areas(session, request_id):
    """Return the Area objects whose geometry contains the request's coordinate."""
    rows = (
        session.query(Area, Request)
        .filter(ST_Contains(Area.coordinate, Request.coordinate))
        .filter(Request.id == request_id)
        .options(Load(Area).load_only("id"))
    )
    # Each row is (Area, Request); keep only the Area.
    return [row[0] for row in rows]
def _get_query(self) -> 'Query':
    """
    Build, filter and sort the query.

    Returns:
        SQLAlchemy query.
    """
    # Start from a query over the related model and apply the request's
    # filter set to it (when filters were supplied).
    subquery = graphene_sqlalchemy.get_query(self.model, self.info.context)
    request_filters = self.graphql_args.get(self.filter_arg)
    if request_filters:
        filter_set = self._get_filter_set(self.info)
        subquery = filter_set.filter(self.info, subquery, request_filters)

    # Alias the filtered result so it can be joined through the relation.
    aliased_model = aliased(self.model, subquery.subquery(with_labels=True))

    query = (graphene_sqlalchemy.get_query(
        self.parent_model, self.info.context).join(
            aliased_model, self.relation).options(
                # Populate the relation from the join above instead of a
                # second query, and load only the parent's PK columns.
                contains_eager(self.relation, alias=aliased_model),
                Load(self.parent_model).load_only(*self.parent_model_pks),
            ))
    query = self._sorted_query(query, self.graphql_args.get('sort'),
                               aliased_model)
    return query
def all_scales():
    """Return all 12-tone scales serialized, with their names eagerly joined."""
    query = (
        db_session.query(MusicScale)
        .options(
            joinedload(MusicScale.names, innerjoin=True),
            # Any other attribute access is a bug: raise instead of lazy-load.
            Load(MusicScale).raiseload('*'),
        )
        .filter(MusicScale.tones == 12)
    )
    return {'scales': [scale.serialize() for scale in query]}
def _test_load_only_propagate(self, use_load):
    """Assert load_only through User.addresses restricts the lazy-load SQL
    to id/email_address, via either the bound (Load) or unbound
    (defaultload) option form depending on *use_load*."""
    User = self.classes.User
    Address = self.classes.Address
    users = self.tables.users
    addresses = self.tables.addresses
    mapper(User, users, properties={
        "addresses": relationship(Address)
    })
    mapper(Address, addresses)
    sess = create_session()
    # Expected SQL: the base user SELECT plus one lazy load per user,
    # each restricted to the load_only columns.
    expected = [
        ("SELECT users.id AS users_id, users.name AS users_name "
         "FROM users WHERE users.id IN (:id_1, :id_2)",
         {'id_2': 8, 'id_1': 7}),
        ("SELECT addresses.id AS addresses_id, "
         "addresses.email_address AS addresses_email_address "
         "FROM addresses WHERE :param_1 = addresses.user_id",
         {'param_1': 7}),
        ("SELECT addresses.id AS addresses_id, "
         "addresses.email_address AS addresses_email_address "
         "FROM addresses WHERE :param_1 = addresses.user_id",
         {'param_1': 8}),
    ]

    if use_load:
        opt = Load(User).defaultload(User.addresses).load_only("id", "email_address")
    else:
        opt = defaultload(User.addresses).load_only("id", "email_address")
    q = sess.query(User).options(opt).filter(User.id.in_([7, 8]))

    def go():
        # Touch the relationship to trigger the per-user lazy loads.
        for user in q:
            user.addresses

    self.sql_eq_(go, expected)
def work_search_view(request):
    """Search works and return snippet dicts plus paging metadata.

    :param request: request object with validated querystring parameters
    :return: dict with total, snippets, limit, offset and status
    """
    params = request.validated['querystring']
    offset = params['offset']
    limit = params['limit']
    order_by = [Work.title.asc()]
    query = params.get('query')
    # Renamed from "type" to stop shadowing the builtin.
    work_type = params.get('type')
    filters = []
    if query:
        filters.append(Work.search_terms.match(query))
    if work_type:
        filters.append(Work.type == work_type)
    from_query = request.context.session.query(Work)
    from_query = from_query.options(
        Load(Work).load_only('id', 'title'))
    # allow search listing with editor principals
    # NOTE(review): `format` below is the *builtin* function — no local
    # `format` is defined in this view. This looks like a latent bug;
    # preserved as-is pending confirmation of the intended value.
    listing = request.context.search(
        filters=filters, offset=offset, limit=limit,
        order_by=order_by, format=format, from_query=from_query,
        principals=['group:editor'])
    snippets = [
        {'id': hit.id, 'info': hit.type, 'name': hit.title}
        for hit in listing['hits']
    ]
    return {'total': listing['total'],
            'snippets': snippets,
            'limit': limit,
            'offset': offset,
            'status': 'ok'}
def go():
    # Undeferring both groups should load opened/userident/description
    # in the single query.
    orders = q.options(
        Load(Order).undefer_group('primary').undefer_group('secondary')
    ).all()
    third = orders[2]
    eq_(third.opened, 1)
    eq_(third.userident, 7)
    eq_(third.description, 'order 3')
def get_all_available_schedule():
    """Return Schedule rows departing from now onward, with both station
    relationships populated from the explicit joins (contains_eager) and
    column loading trimmed via load_only."""
    from_date = datetime.now()
    # Two aliases of Station: one for the departure end, one for arrival.
    station1 = aliased(Station)
    station2 = aliased(Station)
    query = db_session.query(Schedule)\
        .join(station1, station1.id == Schedule.departure_station_id) \
        .join(station2, station2.id == Schedule.arrival_station_id) \
        .options(
            Load(Schedule).load_only("id", "arrival_time", "departure_time", "price").
            contains_eager(Schedule.departure_station, alias=station1).load_only("id", "name"),
            contains_eager(Schedule.arrival_station, alias=station2).load_only("id", "name")
        ).filter(Schedule.departure_time >= from_date)
    # Equivalent SQL but no contains eager
    # query = db_session.query(Schedule)\
    #     .join(station1, station1.id == Schedule.departure_station_id) \
    #     .join(station2, station2.id == Schedule.arrival_station_id). \
    #     options(
    #         Load(Schedule).load_only("id", "arrival_time", "departure_time", "price"),
    #         Load(station1).load_only("id", "name"),
    #         Load(station2).load_only("id", "name"))\
    #     .filter(Schedule.departure_time >= from_date)
    request = query.all()
    return request
def test_set_strat_col(self):
    """Load.defer on a column records a ('loader', path-to-column) entry
    in the bound option's context, pointing at the generated sub-option."""
    User = self.classes.User
    l1 = Load(User)
    l2 = l1.defer("name")
    # The generative defer() call leaves exactly one strategy entry in
    # the returned option's context; grab it for comparison.
    l3 = list(l2.context.values())[0]
    eq_(l1.context, {('loader', self._make_path([User, "name"])): l3})
def get_descriptions(self, code, with_pdb):
    """Return 'pfamseq_id/start-end' strings for the Pfam family matching
    *code* (exact accession, upper-cased, or case-insensitive id). This
    variant pre-aggregates the significant regions into a subquery."""
    subquery = scoped_db.query(PfamA)
    subquery = subquery.filter(
        or_(PfamA.pfamA_acc == code.upper(),
            PfamA.pfamA_id.ilike(code))).distinct().subquery()
    # Distinct significant full-alignment regions for the family.
    query1 = scoped_db.query(PfamARegFullSignificant.pfamseq_acc,
                             PfamARegFullSignificant.seq_start,
                             PfamARegFullSignificant.seq_end)
    query1 = query1.filter(
        PfamARegFullSignificant.pfamA_acc == subquery.c.pfamA_acc,
        PfamARegFullSignificant.in_full)
    query1 = query1.options(
        Load(PfamARegFullSignificant).load_only("seq_start", "seq_end"))
    query1 = query1.distinct().subquery()
    # query2 = scoped_db.query(Pfamseq.pfamseq_id)
    # query2 = query2.filter(Pfamseq.pfamA_acc == subquery.c.pfamA_acc).distinct().subquery()
    query = scoped_db.query(
        concat(Pfamseq.pfamseq_id, '/',
               cast(query1.c.seq_start, types.Unicode), '-',
               cast(query1.c.seq_end, types.Unicode)))
    query = query.filter(Pfamseq.pfamseq_acc == query1.c.pfamseq_acc)
    if with_pdb:
        subquery2 = scoped_db.query(PdbPfamAReg)
        subquery2 = subquery2.filter(PdbPfamAReg.pfamA_acc == subquery.c.
                                     pfamA_acc).distinct().subquery()
        # NOTE(review): this filter references PfamARegFullSignificant,
        # which is not otherwise part of this query (its regions were
        # folded into query1 above) — this likely adds an unintended
        # extra FROM entry / cross join. Confirm against the intended
        # behavior (probably should filter on query1.c.pfamseq_acc).
        query = query.filter(
            PfamARegFullSignificant.pfamseq_acc == subquery2.c.pfamseq_acc)
    query = query.order_by(Pfamseq.pfamseq_id.asc())
    return query.distinct().all()
def test_unsafe_bound_option_cancels_bake(self):
    """A bound option targeting an aliased subclass should cancel baking:
    after repeated runs the lazy loader's bakery cache holds exactly one
    entry rather than growing per alias/iteration."""
    User, Address, Dingaling = self._o2m_twolevel_fixture(lazy="joined")

    class SubDingaling(Dingaling):
        pass

    mapper(SubDingaling, None, inherits=Dingaling)

    # Reach into the lazy-load strategy's bakery to observe cache size.
    lru = Address.dingalings.property._lazy_strategy._bakery(
        lambda q: None
    )._bakery
    l1 = len(lru)
    for i in range(5):
        sess = Session()
        u1 = (
            sess.query(User)
            .options(
                Load(User)
                .defaultload(User.addresses)
                .lazyload(
                    Address.dingalings.of_type(aliased(SubDingaling))
                )
            )
            .first()
        )
        # Trigger the lazy loads the option applies to.
        for ad in u1.addresses:
            ad.dingalings
    l2 = len(lru)
    # Cache started empty and grew by exactly one despite five runs.
    eq_(l1, 0)
    eq_(l2, 1)