def test_gen_path_string_column(self):
    User = self.classes.User

    l = Load(User)
    eq_(
        l._generate_path(
            inspect(User)._path_registry, "name", "column"),
        self._make_path_registry([User, "name"])
    )
def test_str(self):
    User = self.classes.User
    result = Load(User)
    result.strategy = (('deferred', False), ('instrument', True))
    eq_(
        str(result),
        "Load(strategy=(('deferred', False), ('instrument', True)))"
    )
def test_gen_path_string_entity(self):
    User = self.classes.User
    Address = self.classes.Address

    l = Load(User)
    eq_(
        l._generate_path(
            inspect(User)._path_registry, "addresses", "relationship"),
        self._make_path_registry([User, "addresses", Address])
    )
def test_gen_path_attr_column(self):
    User = self.classes.User

    result = Load(User)
    eq_(
        result._generate_path(
            inspect(User)._path_registry, User.name, "column"),
        self._make_path_registry([User, "name"])
    )
def test_bound_cache_key_excluded_on_aliased(self):
    User, Address, Order, Item, SubItem = self.classes(
        'User', 'Address', 'Order', 'Item', 'SubItem')

    query_path = self._make_path_registry([User, "orders"])

    au = aliased(User)
    opt = Load(au).joinedload(au.orders).joinedload(Order.items)
    eq_(opt._generate_cache_key(query_path), None)
def test_gen_path_attr_entity_invalid_noraiseerr(self):
    User = self.classes.User
    Order = self.classes.Order

    result = Load(User)
    eq_(
        result._generate_path(
            inspect(User)._path_registry, Order.items, "relationship",
            False),
        None
    )
def test_gen_path_attr_entity(self):
    User = self.classes.User
    Address = self.classes.Address

    result = Load(User)
    eq_(
        result._generate_path(
            inspect(User)._path_registry, User.addresses, "relationship"),
        self._make_path_registry([User, "addresses", Address])
    )
def test_gen_path_invalid_from_col(self):
    User = self.classes.User

    result = Load(User)
    result.path = self._make_path_registry([User, "name"])
    assert_raises_message(
        sa.exc.ArgumentError,
        "Attribute 'name' of entity 'Mapper|User|users' does "
        "not refer to a mapped entity",
        result._generate_path, result.path, User.addresses, "relationship"
    )
def test_bound_cache_key_included_unsafe_query(self):
    User, Address, Order, Item, SubItem = self.classes(
        'User', 'Address', 'Order', 'Item', 'SubItem')

    au = aliased(User)
    query_path = self._make_path_registry([inspect(au), "orders"])

    opt = Load(au).joinedload(au.orders).\
        joinedload(Order.items)
    eq_(opt._generate_cache_key(query_path), False)
def test_set_strat_ent(self):
    User = self.classes.User

    l1 = Load(User)
    l2 = l1.joinedload("addresses")
    eq_(
        l1.context,
        {
            ('loader', self._make_path([User, "addresses"])): l2
        }
    )
def test_bound_cache_key_included_of_type_safe(self):
    User, Address, Order, Item, SubItem = self.classes(
        'User', 'Address', 'Order', 'Item', 'SubItem')

    query_path = self._make_path_registry([User, "orders"])

    opt = Load(User).joinedload(User.orders).\
        joinedload(Order.items.of_type(SubItem))
    eq_(
        opt._generate_cache_key(query_path),
        ((Order, 'items', SubItem, ('lazy', 'joined')),)
    )
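# Hedged summary of the convention the cache-key tests in this section assert
# (Load._generate_cache_key is an internal SQLAlchemy 1.x API and may change):
# a tuple is the cache key the option contributes at the given query path,
# None means the option does not apply to that path at all, and False means
# the option applies but is unsafe to cache, e.g. because it refers to an
# aliased() entity.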
def process(self, problem_id: int, user_ids):
    args = parser.parse(get_args, request)
    query = self._build_query_by_args(args, problem_id, user_ids)
    per_page_count = args.get('count')
    page = args.get('page')
    result = query.paginate(page=page, per_page=per_page_count,
                            error_out=False, max_per_page=100)

    runs = []
    problem_ids = set()
    # NOTE: this rebinds user_ids, shadowing the parameter that was already
    # consumed by _build_query_by_args above.
    user_ids = set()
    for run in result.items:
        problem_ids.add(run.problem_id)
        user_ids.add(run.user_id)

    problems_result = db.session.query(Problem).filter(
        Problem.id.in_(problem_ids)).options(
            Load(Problem).load_only('id', 'name'))
    problems = {problem.id: problem for problem in problems_result}

    users_result = db.session.query(SimpleUser).filter(
        SimpleUser.id.in_(user_ids)).options(
            Load(SimpleUser).load_only('id', 'firstname', 'lastname'))
    users = {u.id: u for u in users_result}

    for run in result.items:
        # Only attach a user when one was actually fetched.
        if run.user_id > 0 and run.user_id in users:
            run.user = users[run.user_id]
        run.problem = problems[run.problem_id]
        if args.get('include_source'):
            run.code = base64.b64encode(run.source)
        runs.append(run)

    metadata = {'count': result.total, 'page_count': result.pages}
    schema = RunSchema(many=True)
    data = schema.dump(runs)
    return flask_jsonify({
        'result': 'success',
        'data': data.data,
        'metadata': metadata,
    })
def test_bound_cache_key_included_unsafe_option_three(self):
    User, Address, Order, Item, SubItem = self.classes(
        'User', 'Address', 'Order', 'Item', 'SubItem')

    query_path = self._make_path_registry([User, "orders", Order, "items"])

    opt = Load(User).joinedload(User.orders).\
        joinedload(Order.items.of_type(aliased(SubItem)))
    eq_(opt._generate_cache_key(query_path), False)
def test_set_strat_col(self):
    User = self.classes.User

    l1 = Load(User)
    l2 = l1.defer("name")
    l3 = list(l2.context.values())[0]
    eq_(
        l1.context,
        {
            ('loader', self._make_path([User, "name"])): l3
        }
    )
def test_bound_cache_key_excluded_on_other(self):
    User, Address, Order, Item, SubItem = self.classes(
        'User', 'Address', 'Order', 'Item', 'SubItem')

    query_path = self._make_path_registry([User, "addresses"])

    opt = Load(User).joinedload(User.orders).joinedload(Order.items)
    eq_(opt._generate_cache_key(query_path), None)
def get_entries_commits(full_base_query):
    """
    Takes a base query and only selects commit-relevant data.

    :param full_base_query: the base query to restrict
    :return: entries with all Vulnerability/Nvd columns deferred and only
        their commits eagerly loaded
    """
    entries_commits = full_base_query.options(Load(Vulnerability).defer('*'))
    entries_commits = entries_commits.options(Load(Nvd).defer('*'))
    entries_commits = entries_commits.options(
        joinedload(Vulnerability.commits))
    entries_subset = entries_commits.all()
    return entries_subset
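# A minimal self-contained sketch (hypothetical Parent/Child models, not from
# the snippet above) of the same pattern: Load(entity).defer('*') suppresses
# the entity's column loads while joinedload() still populates a relationship.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Load, Session, joinedload, relationship

Base = declarative_base()


class Parent(Base):
    __tablename__ = 'parent'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    children = relationship('Child')


class Child(Base):
    __tablename__ = 'child'
    id = Column(Integer, primary_key=True)
    parent_id = Column(Integer, ForeignKey('parent.id'))


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(engine)

# Defer every Parent column; eagerly load the children collection.
parents = session.query(Parent).options(
    Load(Parent).defer('*'),
    joinedload(Parent.children),
).all()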
def test_bound_cache_key_included_safe_w_option(self):
    User, Address, Order, Item, SubItem = self.classes(
        'User', 'Address', 'Order', 'Item', 'SubItem')

    opt = Load(User).defaultload("orders").joinedload(
        "items", innerjoin=True).defer("description")
    query_path = self._make_path_registry([User, "orders"])

    eq_(
        opt._generate_cache_key(query_path),
        (
            (Order, 'items', Item,
             ('lazy', 'joined'), ('innerjoin', True)),
            (Order, 'items', Item, 'description',
             ('deferred', True), ('instrument', True))
        )
    )
def sip_lines_for_device(device_id):
    query = (Session.query(LineFeatures, UserSIP, Extension)
             .join(LineFeatures.endpoint_sip)
             .join(LineFeatures.user_lines)
             .join(UserLine.main_user_rel)
             .join(LineFeatures.line_extensions)
             .join(LineExtension.main_extension_rel)
             .filter(LineFeatures.device == device_id)
             .options(
                 Load(LineFeatures).load_only("id", "configregistrar"),
                 Load(UserSIP).load_only("id", "callerid", "name", "secret"),
                 Load(Extension).load_only("id", "exten"),
             ))
    return query.all()
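# The pattern above generalizes: when a query selects several entities, each
# entity gets its own Load(...) anchor so load_only() applies per entity.
# A hedged fragment (names hypothetical, mappings and session assumed):
#
#     session.query(Invoice, Customer).join(Invoice.customer).options(
#         Load(Invoice).load_only("id", "total"),
#         Load(Customer).load_only("id", "name"),
#     )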
def test_bound_cache_key_included_safe_w_deferred(self):
    User, Address, Order, Item, SubItem = self.classes(
        'User', 'Address', 'Order', 'Item', 'SubItem')

    query_path = self._make_path_registry([User, "addresses"])

    opt = Load(User).joinedload(User.addresses).\
        defer(Address.email_address).defer(Address.user_id)
    eq_(
        opt._generate_cache_key(query_path),
        (
            (Address, "email_address",
             ('deferred', True), ('instrument', True)),
            (Address, "user_id",
             ('deferred', True), ('instrument', True)),
        )
    )
def test_bound_cache_key_included_safe(self):
    User, Address, Order, Item, SubItem = self.classes(
        'User', 'Address', 'Order', 'Item', 'SubItem')

    query_path = self._make_path_registry([User, "orders"])

    opt = Load(User).joinedload(User.orders).joinedload(Order.items)
    eq_(
        opt._generate_cache_key(query_path),
        ((Order, 'items', Item, ('lazy', 'joined')),)
    )
def get_row(self, trunk_id):
    query = (self.session.query(TrunkFeatures, UserIAX, UserCustom)
             .outerjoin(TrunkFeatures.endpoint_sip)
             .outerjoin(TrunkFeatures.endpoint_iax)
             .outerjoin(TrunkFeatures.endpoint_custom)
             .options(
                 Load(TrunkFeatures).load_only("id", "context"),
                 Load(UserIAX).load_only("id", "category", "context"),
                 Load(UserCustom).load_only("id", "category", "context"))
             .filter(TrunkFeatures.id == trunk_id))
    return query.first()
def test_bound_cache_key_undefer_group(self):
    User, Address = self.classes('User', 'Address')

    query_path = self._make_path_registry([User, "addresses"])

    opt = Load(User).defaultload(User.addresses).undefer_group('xyz')
    eq_(
        opt._generate_cache_key(query_path),
        (
            (Address, 'column:*', ("undefer_group_xyz", True)),
        )
    )
def test_bound_cache_key_included_safe_w_loadonly(self):
    User, Address, Order, Item, SubItem = self.classes(
        'User', 'Address', 'Order', 'Item', 'SubItem')

    query_path = self._make_path_registry([User, "addresses"])

    opt = Load(User).defaultload(User.addresses).\
        load_only("id", "email_address")
    eq_(
        opt._generate_cache_key(query_path),
        (
            (Address, 'id',
             ('deferred', False), ('instrument', True)),
            (Address, 'email_address',
             ('deferred', False), ('instrument', True)),
            (Address, 'column:*',
             ('deferred', True), ('instrument', True),
             ('undefer_pks', True))
        )
    )
def get_descriptions(self, code, with_pdb):
    subquery = scoped_db.query(PfamA)
    subquery = subquery.filter(
        or_(PfamA.pfamA_acc == code.upper(),
            PfamA.pfamA_id.ilike(code))).distinct().subquery()

    query1 = scoped_db.query(PfamARegFullSignificant.pfamseq_acc,
                             PfamARegFullSignificant.seq_start,
                             PfamARegFullSignificant.seq_end)
    query1 = query1.filter(
        PfamARegFullSignificant.pfamA_acc == subquery.c.pfamA_acc,
        PfamARegFullSignificant.in_full)
    query1 = query1.options(
        Load(PfamARegFullSignificant).load_only("seq_start", "seq_end"))
    query1 = query1.distinct().subquery()

    # query2 = scoped_db.query(Pfamseq.pfamseq_id)
    # query2 = query2.filter(
    #     Pfamseq.pfamA_acc == subquery.c.pfamA_acc).distinct().subquery()

    query = scoped_db.query(
        concat(Pfamseq.pfamseq_id, '/',
               cast(query1.c.seq_start, types.Unicode), '-',
               cast(query1.c.seq_end, types.Unicode)))
    query = query.filter(Pfamseq.pfamseq_acc == query1.c.pfamseq_acc)

    if with_pdb:
        subquery2 = scoped_db.query(PdbPfamAReg)
        subquery2 = subquery2.filter(
            PdbPfamAReg.pfamA_acc == subquery.c.pfamA_acc
        ).distinct().subquery()
        query = query.filter(
            PfamARegFullSignificant.pfamseq_acc == subquery2.c.pfamseq_acc)

    query = query.order_by(Pfamseq.pfamseq_id.asc())
    return query.distinct().all()
def convert(self, mass_shift_cache=None, scan_cache=None,
            structure_cache=None, peptide_relation_cache=None):
    if scan_cache is None:
        scan_cache = dict()
    session = object_session(self)
    flag = session.info.get("has_spectrum_match_mass_shift")
    if flag:
        spectrum_match_q = self.spectrum_matches.options(
            Load(GlycopeptideSpectrumMatch).undefer(
                "mass_shift_id")).all()
    else:
        spectrum_match_q = self.spectrum_matches.all()
    matches = [
        x.convert(mass_shift_cache=mass_shift_cache,
                  scan_cache=scan_cache,
                  structure_cache=structure_cache,
                  peptide_relation_cache=peptide_relation_cache)
        for x in spectrum_match_q
    ]
    matches.sort(key=lambda x: x.score, reverse=True)
    solution_set_tp = MemorySpectrumSolutionSet
    if matches and matches[0].is_multiscore():
        solution_set_tp = MultiScoreSpectrumSolutionSet
    inst = solution_set_tp(convert_scan_to_record(self.scan), matches)
    inst.q_value = min(x.q_value for x in inst)
    inst.id = self.id
    return inst
def migrate_roles(
    session: Session,
    pvm_key_map: PvmMigrationMapType,
    commit: bool = False,
) -> None:
    """
    Migrates all existing roles that have the permissions to be migrated
    """
    # Collect a map of PermissionView objects for migration
    pvm_map: Dict[PermissionView, List[PermissionView]] = {}
    for old_pvm_key, new_pvms_ in pvm_key_map.items():
        old_pvm = _find_pvm(session, old_pvm_key.view, old_pvm_key.permission)
        if old_pvm:
            for new_pvm_key in new_pvms_:
                new_pvm = _find_pvm(
                    session, new_pvm_key.view, new_pvm_key.permission)
                if old_pvm not in pvm_map:
                    pvm_map[old_pvm] = [new_pvm]
                else:
                    pvm_map[old_pvm].append(new_pvm)

    # Replace old permissions by the new ones on all existing roles
    roles = session.query(Role).options(
        Load(Role).joinedload(Role.permissions)).all()
    for role in roles:
        for old_pvm, new_pvms in pvm_map.items():
            if old_pvm in role.permissions:
                logger.info(f"Removing {old_pvm} from {role}")
                role.permissions.remove(old_pvm)
                for new_pvm in new_pvms:
                    if new_pvm not in role.permissions:
                        logger.info(f"Add {new_pvm} to {role}")
                        role.permissions.append(new_pvm)
        session.merge(role)

    # Delete old permissions
    _delete_old_permissions(session, pvm_map)
    if commit:
        session.commit()
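# Note on the query above: eagerly loading Role.permissions via joinedload
# means the membership checks and list mutations in the migration loop work
# against already-loaded collections: one SELECT up front instead of one lazy
# SELECT per role.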
def find_areas(session, request_id):
    resp = session.query(Area, Request) \
        .filter(ST_Contains(Area.coordinate, Request.coordinate)) \
        .filter(Request.id == request_id) \
        .options(Load(Area).load_only("id"))
    # Return 'Area' objects only.
    return list(map(lambda r: r[0], resp))
def update_device_registration_status(request, device_id,
                                      registration_status):
    try:
        device_id = str(device_id)
        device = request.app.session.query(Device) \
            .options(
                Load(Device).load_only(
                    "id", "registration_status", "deleted_at")) \
            .filter(Device.deleted_at == None) \
            .filter(Device.device_id == device_id) \
            .one_or_none()
        if not device:
            logging.getLogger().warn(
                "Device with given device_id={} doesn't exist".format(
                    device_id))
            raise InvalidValueException("device_id")
        device.registration_status = registration_status
        device.save()
        request.app.session.commit()
        request.app.session.flush()
        return True
    except Exception as e:
        logging.getLogger().info(
            "Exception updating device information: {}".format(e))
        request.app.session.rollback()
        return False
def test_unsafe_bound_option_cancels_bake(self):
    User, Address, Dingaling = self._o2m_twolevel_fixture(lazy="joined")

    class SubDingaling(Dingaling):
        pass

    mapper(SubDingaling, None, inherits=Dingaling)

    lru = Address.dingalings.property._lazy_strategy._bakery(
        lambda q: None)._bakery
    l1 = len(lru)
    for i in range(5):
        sess = Session()
        u1 = (
            sess.query(User)
            .options(
                Load(User)
                .defaultload(User.addresses)
                .lazyload(
                    Address.dingalings.of_type(aliased(SubDingaling))
                )
            )
            .first()
        )
        for ad in u1.addresses:
            ad.dingalings
    l2 = len(lru)
    eq_(l1, 0)
    eq_(l2, 1)
def test_query_expr(self):
    (User,) = self.classes("User")

    self._run_cache_key_fixture(
        lambda: (
            with_expression(User.name, true()),
            with_expression(User.name, null()),
            with_expression(User.name, func.foobar()),
            with_expression(User.name, User.name == "test"),
            Load(User).with_expression(User.name, true()),
            Load(User).with_expression(User.name, null()),
            Load(User).with_expression(User.name, func.foobar()),
            Load(User).with_expression(User.name, User.name == "test"),
        ),
        compare_values=True,
    )
def get_all_available_schedule():
    from_date = datetime.now()
    station1 = aliased(Station)
    station2 = aliased(Station)
    query = db_session.query(Schedule) \
        .join(station1, station1.id == Schedule.departure_station_id) \
        .join(station2, station2.id == Schedule.arrival_station_id) \
        .options(
            Load(Schedule)
            .load_only("id", "arrival_time", "departure_time", "price")
            .contains_eager(Schedule.departure_station, alias=station1)
            .load_only("id", "name"),
            contains_eager(Schedule.arrival_station, alias=station2)
            .load_only("id", "name")
        ).filter(Schedule.departure_time >= from_date)

    # Equivalent SQL but no contains_eager:
    # query = db_session.query(Schedule) \
    #     .join(station1, station1.id == Schedule.departure_station_id) \
    #     .join(station2, station2.id == Schedule.arrival_station_id) \
    #     .options(
    #         Load(Schedule).load_only(
    #             "id", "arrival_time", "departure_time", "price"),
    #         Load(station1).load_only("id", "name"),
    #         Load(station2).load_only("id", "name")) \
    #     .filter(Schedule.departure_time >= from_date)

    request = query.all()
    return request
def manage_project_releases(project, request):
    # Get the counts for all the files for this project, grouped by the
    # release version and the package types
    filecounts = (
        request.db.query(Release.version, File.packagetype,
                         func.count(File.id))
        .options(Load(Release).load_only("version"))
        .outerjoin(File)
        .group_by(Release.id)
        .group_by(File.packagetype)
        .filter(Release.project == project)
        .all())

    # Turn rows like:
    #   [('0.1', 'bdist_wheel', 2), ('0.1', 'sdist', 1)]
    # into:
    #   {
    #       '0.1': {
    #           'bdist_wheel': 2,
    #           'sdist': 1,
    #           'total': 3,
    #       }
    #   }
    version_to_file_counts = {}
    for version, packagetype, count in filecounts:
        packagetype_to_count = version_to_file_counts.setdefault(version, {})
        packagetype_to_count.setdefault("total", 0)
        packagetype_to_count[packagetype] = count
        packagetype_to_count["total"] += count

    return {
        "project": project,
        "version_to_file_counts": version_to_file_counts,
    }
def all_scales():
    scales = db_session.query(MusicScale).options(
        joinedload(MusicScale.names, innerjoin=True),
        Load(MusicScale).raiseload('*')).filter(MusicScale.tones == 12)
    return {'scales': [s.serialize() for s in scales]}
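# A hedged note on the snippet above: Load(MusicScale).raiseload('*') turns
# any lazy load triggered on a MusicScale relationship into an error, so the
# joinedload(MusicScale.names) option must cover everything serialize()
# touches. This is a common guard against accidental N+1 queries.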
async def get_device_by_id(request, device_id):
    """
    Tries to get IdentificationDataDevice by given device_id.

    :param request: request whose app holds the database session
    :param device_id: device ID
    :raise 404: if device with given device_id is not found
    :return: lookup IdentificationDataDevice entity
    """
    device_id = str(device_id)
    try:
        device = request.app.session.query(Device) \
            .options(
                Load(Device).load_only(
                    "id", "device_id", "registration_status",
                    "push_notification_token", "type", "deleted_at")) \
            .filter(Device.deleted_at == None) \
            .filter(Device.device_id == device_id) \
            .order_by(Device.id) \
            .one_or_none()
        if device:
            logging.getLogger().warn(
                "Device with given device_id={} found".format(device_id))
            return device
        else:
            logging.getLogger().warn(
                "Device with given device_id={} doesn't exist".format(
                    device_id))
            # NOTE: this raise is caught by the except below, which
            # turns it into a None return.
            raise InvalidValueException("device_id")
    except Exception as e:
        logging.getLogger().info("Alchemy Exception: {}".format(e))
        request.app.session.rollback()
        return None
def work_search_view(request):
    offset = request.validated['querystring']['offset']
    limit = request.validated['querystring']['limit']
    order_by = [Work.title.asc()]
    query = request.validated['querystring'].get('query')
    type = request.validated['querystring'].get('type')
    filters = []
    if query:
        filters.append(Work.search_terms.match(query))
    if type:
        filters.append(Work.type == type)

    from_query = request.context.session.query(Work)
    from_query = from_query.options(
        Load(Work).load_only('id', 'title'))

    # allow search listing with editor principals
    listing = request.context.search(
        filters=filters,
        offset=offset,
        limit=limit,
        order_by=order_by,
        format=format,
        from_query=from_query,
        principals=['group:editor'])
    snippets = []
    for hit in listing['hits']:
        snippets.append({'id': hit.id, 'info': hit.type, 'name': hit.title})
    return {'total': listing['total'],
            'snippets': snippets,
            'limit': limit,
            'offset': offset,
            'status': 'ok'}
def _get_query(self) -> 'Query':
    """
    Build, filter and sort the query.

    Returns:
        SQLAlchemy query.
    """
    subquery = graphene_sqlalchemy.get_query(self.model, self.info.context)

    request_filters = self.graphql_args.get(self.filter_arg)
    if request_filters:
        filter_set = self._get_filter_set(self.info)
        subquery = filter_set.filter(self.info, subquery, request_filters)

    aliased_model = aliased(
        self.model, subquery.subquery(with_labels=True))

    query = (graphene_sqlalchemy
             .get_query(self.parent_model, self.info.context)
             .join(aliased_model, self.relation)
             .options(
                 contains_eager(self.relation, alias=aliased_model),
                 Load(self.parent_model).load_only(*self.parent_model_pks),
             ))
    query = self._sorted_query(
        query, self.graphql_args.get('sort'), aliased_model)
    return query
def go():
    l = q.options(
        Load(Order).undefer_group('primary').undefer_group('secondary')
    ).all()
    o2 = l[2]
    eq_(o2.opened, 1)
    eq_(o2.userident, 7)
    eq_(o2.description, 'order 3')
def _test_load_only_propagate(self, use_load):
    User = self.classes.User
    Address = self.classes.Address

    users = self.tables.users
    addresses = self.tables.addresses

    mapper(User, users, properties={
        "addresses": relationship(Address)
    })
    mapper(Address, addresses)

    sess = create_session()
    expected = [
        ("SELECT users.id AS users_id, users.name AS users_name "
         "FROM users WHERE users.id IN (:id_1, :id_2)",
         {'id_2': 8, 'id_1': 7}),
        ("SELECT addresses.id AS addresses_id, "
         "addresses.email_address AS addresses_email_address "
         "FROM addresses WHERE :param_1 = addresses.user_id",
         {'param_1': 7}),
        ("SELECT addresses.id AS addresses_id, "
         "addresses.email_address AS addresses_email_address "
         "FROM addresses WHERE :param_1 = addresses.user_id",
         {'param_1': 8}),
    ]

    if use_load:
        opt = Load(User).defaultload(User.addresses).load_only(
            "id", "email_address")
    else:
        opt = defaultload(User.addresses).load_only("id", "email_address")

    q = sess.query(User).options(opt).filter(User.id.in_([7, 8]))

    def go():
        for user in q:
            user.addresses

    self.sql_eq_(go, expected)
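# As the test above exercises via use_load, the bound and unbound option
# forms are interchangeable here; the bound form just anchors the option to
# an explicit lead entity up front. A hedged fragment:
#
#     opt_bound = Load(User).defaultload(User.addresses).load_only(
#         "id", "email_address")
#     opt_unbound = defaultload(User.addresses).load_only(
#         "id", "email_address")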
def test_query_opts_key_bound_branching(self):
    A, B, C, D, E, F, G = self.classes('A', 'B', 'C', 'D', 'E', 'F', 'G')

    base = Load(A).joinedload(A.bs)
    opts = [
        base.joinedload(B.cs),
        base.joinedload(B.ds),
        base.joinedload(B.es),
        base.joinedload(B.fs)
    ]

    q = Session().query(A)

    @profiling.function_call_count()
    def go():
        q.options(*opts)

    go()
def test_generate_cache_key_bound_branching(self):
    A, B, C, D, E, F, G = self.classes('A', 'B', 'C', 'D', 'E', 'F', 'G')

    base = Load(A).joinedload(A.bs)
    opts = [
        base.joinedload(B.cs),
        base.joinedload(B.ds),
        base.joinedload(B.es),
        base.joinedload(B.fs)
    ]

    cache_path = inspect(A)._path_registry

    @profiling.function_call_count()
    def go():
        for opt in opts:
            opt._generate_cache_key(cache_path)

    go()
def test_bound_cache_key_excluded_of_type_unsafe(self):
    User, Address, Order, Item, SubItem = self.classes(
        'User', 'Address', 'Order', 'Item', 'SubItem')

    # query of:
    #
    # query(User).options(
    #     subqueryload(User.orders).
    #     subqueryload(Order.items.of_type(aliased(SubItem))))
    #
    # we are lazy loading Address objects from User.addresses;
    # the path excludes our option, so the cache key should be None
    query_path = self._make_path_registry([User, "addresses"])

    opt = Load(User).subqueryload(User.orders).\
        subqueryload(Order.items.of_type(aliased(SubItem)))
    eq_(opt._generate_cache_key(query_path), None)
def build_entity_query(self):
    """
    Builds a :class:`sqla:sqlalchemy.orm.query.Query` object for this
    entity (an instance of
    :class:`sir.schema.searchentities.SearchEntity`) that eagerly loads
    the values of all search fields.

    :rtype: :class:`sqla:sqlalchemy.orm.query.Query`
    """
    root_model = self.model
    query = Query(root_model)
    paths = [field.paths for field in self.fields]

    if (config.CFG.getboolean("sir", "wscompat") and
            self.extrapaths is not None):
        paths.extend([self.extrapaths])

    merged_paths = merge_paths(paths)

    for field_paths in paths:
        for path in field_paths:
            current_merged_path = merged_paths
            model = root_model
            load = Load(model)
            split_path = path.split(".")
            for pathelem in split_path:
                current_merged_path = current_merged_path[pathelem]
                column = getattr(model, pathelem)

                # __tablename__s in annotation paths are plain strings;
                # stop descending when we hit one.
                if (not isinstance(column, InstrumentedAttribute) and
                        not isinstance(column, CompositeProperty)):
                    break

                prop = column.property
                if isinstance(prop, RelationshipProperty):
                    pk = column.mapper.primary_key[0].name
                    if prop.direction == ONETOMANY:
                        load = load.subqueryload(pathelem)
                    elif prop.direction == MANYTOONE:
                        load = load.joinedload(pathelem)
                    else:
                        load = load.defaultload(pathelem)
                    required_columns = list(current_merged_path.keys())
                    required_columns.append(pk)

                    # Get the mapper class of the current element of the
                    # path so the next iteration can access it.
                    model = prop.mapper.class_

                    # For composite properties, load the columns they
                    # consist of, because eagerly loading a composite
                    # property doesn't automatically load them.
                    composite_columns = list(filter(
                        partial(is_composite_column, model),
                        required_columns))
                    for composite_column in composite_columns:
                        composite_parts = [
                            c.name for c in
                            getattr(model,
                                    composite_column).property.columns]
                        logger.debug("Loading %s instead of %s on %s",
                                     composite_parts, composite_column,
                                     model)
                        required_columns.remove(composite_column)
                        required_columns.extend(composite_parts)

                    logger.debug("Loading only %s on %s",
                                 required_columns, model)
                    load = defer_everything_but(class_mapper(model),
                                                load, *required_columns)
            query = query.options(load)

    if self.extraquery is not None:
        query = self.extraquery(query)
    return query