def base_query(self, query: Query):
    """Extend *query* with the joins/filters for the current context.

    Eagerly loads each Value's valueset, its references and their sources,
    then narrows by whichever of ``self.language`` / ``self.parameter`` /
    ``self.contribution`` is set (checked in that order; first match wins).

    :param query: base SQLAlchemy query over ``Value``.
    :return: the filtered/joined query.
    """
    query = query.join(ValueSet).options(
        joinedload(Value.valueset).joinedload(
            ValueSet.references).joinedload(ValueSetReference.source))
    if self.language:
        query = query.join(ValueSet.parameter)
        if self.language.level == models.LanguoidLevel.group:
            # A group languoid has no values itself: match values of all
            # of its child languages instead.
            children = self.language.children
            children_pks = [child.pk for child in children]
            filtered = query.filter(ValueSet.language_pk.in_(children_pks))
            filtered = filtered.join(ValueSet.language)
            return filtered
        return query.filter(ValueSet.language_pk == self.language.pk)
    if self.parameter:
        query = query.join(ValueSet.language)
        # Domain elements are optional, hence the outer join.
        query = query.outerjoin(DomainElement).options(
            joinedload(Value.domainelement))
        return query.filter(ValueSet.parameter_pk == self.parameter.pk)
    if self.contribution:
        query = query.join(ValueSet.parameter)
        return query.filter(
            ValueSet.contribution_pk == self.contribution.pk)
    # No context object set: join both ends so callers can filter freely.
    query = query.join(ValueSet.language).join(ValueSet.parameter)
    return query
def __init__(self, entities, session):
    """Create a GrimoreQuery.

    Parameters
    ----------
    entities: list of SQLAlchemy entities
        Entities (tables) to include in the query
    session: SQLAlchemy session
        SQLAlchemy session to use to connect to the database

    Attributes
    ----------
    self.start: datetime.datetime
        Start of the period to consider for commits.
        Default: None (start from the first commit)
    self.end: datetime.datetime
        End of the period to consider for commits.
        Default: None (end in the last commit)
    """
    # No temporal bounds by default: the whole history is considered.
    self.start = None
    self.end = None
    # Record of tables already joined, so later code can skip
    # re-joining them.
    self.joined = []
    Query.__init__(self, entities, session)
def scorecard(self):
    """Build the per-player scorecard query for this contest week.

    First builds a subquery computing each scoring component per game
    (restricted to ``self.gids``), then joins contestants/players/games
    against it and aggregates the per-player maximum total, ordered by
    total descending (ties broken by game start time).
    """
    # One row per game with every scoring component as an Integer column.
    sc = Query([
        Game.gid,
        Game.player_id,
        type_coerce(self._uniq(), Integer).label("uniq"),
        type_coerce(self._brenter(), Integer).label("brenter"),
        type_coerce(self._brend(), Integer).label("brend"),
        type_coerce(self._god(), Integer).label("god"),
        type_coerce(self._rune(1), Integer).label("rune"),
        type_coerce(self._rune(3), Integer).label("threerune"),
        self._win().label("win"),
        self._bonus(self.tier1).label("bonusone"),
        self._bonus(self.tier2).label("bonustwo"),
    ]).filter(Game.gid.in_(self.gids)).subquery()
    # Outer joins keep contestants that have no qualifying game yet.
    return Query(
        [Player, Game]).select_from(CsdcContestant).join(Player).outerjoin(
        sc, CsdcContestant.player_id == sc.c.player_id).outerjoin(
        Game, Game.gid == sc.c.gid).add_columns(
        sc.c.uniq, sc.c.brenter, sc.c.brend, sc.c.god, sc.c.rune,
        sc.c.threerune, sc.c.win, sc.c.bonusone, sc.c.bonustwo,
        func.max(sc.c.uniq + sc.c.brenter + sc.c.brend + sc.c.god
                 + sc.c.rune + sc.c.threerune + sc.c.win
                 + sc.c.bonusone + sc.c.bonustwo).label("total")).group_by(
        CsdcContestant.player_id).order_by(
        desc("total"), Game.start)
def overview():
    """Build the overview query: one row per player, with each week's
    scorecard columns attached (labelled ``wk<N><col>``).

    Fixes over the original: the unused ``totalcols`` local is removed,
    and the long hand-written ``add_column`` chain is replaced by a loop
    over the column names (same columns, same order).
    """
    q = Query(Player)
    # Per-week scorecard columns, in the exact order they are exposed.
    per_week_cols = ("time", "turns", "slimy", "silver", "iron", "bone",
                     "obsidian", "icy", "pan", "qaz", "chei", "lucy", "jiyva")
    for wk in weeks:
        wk_n = "wk" + wk.number
        a = wk.scorecard().subquery()
        # Outer join keeps players that did not play this week.
        q = q.outerjoin(a, Player.id == a.c.player_id).add_column(
            a.c.subtotal.label(wk_n))
        for col in per_week_cols:
            q = q.add_column(getattr(a.c, col).label(wk_n + col))
    return q
def apply_order_by(
    self,
    query: Query,
    order_column: str,
    order_direction: str,
    aliases_mapping: Dict[str, AliasedClass] = None,
) -> Query:
    """Apply an ORDER BY clause to *query*.

    :param query: query to extend.
    :param order_column: column name; may be dotted (``relation.column``)
        or refer to a ``@renders``-decorated model method.
    :param order_direction: ``"asc"`` orders ascending, anything else
        orders descending.
    :param aliases_mapping: relation-name -> aliased class mapping used
        to resolve dotted columns (default None).
    :return: the ordered query; unchanged when ``order_column`` is empty.
    """
    if order_column != "":
        # if Model has custom decorator **renders('<COL_NAME>')**
        # this decorator will add a property to the method named *_col_name*
        if hasattr(self.obj, order_column):
            if hasattr(getattr(self.obj, order_column), "_col_name"):
                order_column = getattr(self._get_attr(order_column), "_col_name")
        # Fall back to the raw name when the model has no such attribute.
        _order_column = self._get_attr(order_column) or order_column
        if is_column_dotted(order_column):
            root_relation = get_column_root_relation(order_column)
            # On MVC we still allow for joins to happen here
            if not self.is_model_already_joined(
                    query, self.get_related_model(root_relation)):
                query = self._query_join_relation(
                    query, root_relation, aliases_mapping=aliases_mapping)
            column_leaf = get_column_leaf(order_column)
            # Order by the aliased relation's column, not the raw model's.
            _alias = self.get_alias_mapping(root_relation, aliases_mapping)
            _order_column = getattr(_alias, column_leaf)
        if order_direction == "asc":
            query = query.order_by(asc(_order_column))
        else:
            query = query.order_by(desc(_order_column))
    return query
def get_cboe_sym_ts_from_db(query_start_date, query_end_date, symbol_list=None, px_types=None):
    """Get timeseries data from mysql.

    :param query_start_date: inclusive start date (compared as string ids)
    :param query_end_date: inclusive end date
    :param symbol_list: symbols to fetch; defaults to ['SPX']
    :param px_types: price columns to fetch (Open, High, Low, Close,
        LastSale), or ['ALL'] (the default) for every column
    :return: DataFrame indexed by (Id, Symbol)

    Fixes over the original: mutable default arguments replaced by the
    ``None`` sentinel idiom, and the two near-identical query-building
    branches are merged (only the column list differed).
    """
    if symbol_list is None:
        symbol_list = ['SPX']
    if px_types is None:
        px_types = ['ALL']
    session = get_db_session()
    if px_types[0] == 'ALL':
        list_poc_db_col_nm = ['Id', 'Symbol', 'Open', 'High', 'Low', 'Close',
                              'LastSale', 'LastTime']
    else:
        list_poc_db_col_nm = ['Id', 'Symbol'] + list(px_types) + ['LastSale', 'LastTime']
    the_columns = [getattr(VixTermStructure, col) for col in list_poc_db_col_nm]
    q = Query(the_columns, session=session)
    from_db = q.filter(VixTermStructure.Id >= str(query_start_date),
                       VixTermStructure.Id <= str(query_end_date),
                       VixTermStructure.Symbol.in_(symbol_list)).all()
    df = pd.DataFrame.from_records(from_db)
    # Multi-index: first level is the date (Id), second the Symbol.
    df.columns = list_poc_db_col_nm
    df.set_index(['Id', 'Symbol'], inplace=True)
    return df
def authorize_query(query: Query, get_oso, get_user, get_action) -> Query:
    """Authorize an existing query with an oso instance, user and action."""
    oso = get_oso()
    action = get_action()
    actor = get_user()

    # TODO (dhatch): Queries that already carry a LIMIT or OFFSET reject
    # further filters by default; disabling assertions lets us authorize
    # them anyway, at the risk of producing subtly incorrect queries.
    # Remove this if possible.
    query = query.enable_assertions(False)

    for entity in {col["entity"] for col in query.column_descriptions}:
        # Columns that are not mapper entities carry no entity to authorize.
        if entity is None:
            continue
        auth_filter = authorize_model_filter(oso, actor, action,
                                             query.session, entity)
        if auth_filter is not None:
            query = query.filter(auth_filter)

    return query
def __init__(self, entities, session, **kw):
    """Initialise the query, remembering the entity class when the first
    entity is an ``Entity`` subclass (otherwise ``None``)."""
    SaQuery.__init__(self, entities, session, **kw)
    first = entities[0]
    is_entity_cls = isinstance(first, type) and issubclass(first, Entity)
    self._entity_class = first if is_entity_cls else None
def __init__ (self, entities, session):
    """Create an SCMQuery.

    Parameters
    ----------
    entities:
        SQLAlchemy entities (tables) to include in the query
        (presumably — TODO confirm against sibling GrimoreQuery).
    session:
        SQLAlchemy session used to connect to the database.

    Attributes
    ----------
    self.start: datetime.datetime
        Start of the period to consider for commits.
        Default: None (start from the first commit)
    self.end: datetime.datetime
        End of the period to consider for commits.
        Default: None (end in the last commit)
    """
    self.start = None
    self.end = None
    # Keep an accounting of which tables have been joined, to avoid
    # undesired repeated joins
    self.joined = []
    Query.__init__(self, entities, session)
def __init__(self, entities, session, **kw):
    """Initialise the query and cache the entity class, if any."""
    SaQuery.__init__(self, entities, session, **kw)
    first = entities[0]
    if isinstance(first, type) and issubclass(first, Entity):
        self._entity_class = first
    else:
        # just for compatibility pragma: no cover
        self._entity_class = None
def _apply_range_filter(query: Query, key: ClauseElement, value_range: Tuple[T, T]) -> Query: gte_value, lte_value = value_range if gte_value is not None: query = query.filter(key >= gte_value) if lte_value is not None: query = query.filter(key <= lte_value) return query
def run():
    """Fetch 30 days of HK-connect holding ratios for the configured stock
    pool, keep only normal trading days, and write them to the
    ``tail_northup`` table."""
    db = DBManager()
    jq = JQData()
    start_date = datetime.now().strftime("%Y%m%d")
    # Window start: 30 days before today, as a YYYYMMDD string.
    date_id = datetime.strftime(parse(start_date) + relativedelta(days=-30), '%Y%m%d')
    stock_pool = pd.read_csv(os.path.join(PROJECT_DIR, "config/pool.txt"),
                             dtype=str, sep=' ',
                             names=['tag', 'name', 'code', 'price'])
    stocks = stock_pool['code'].map(lambda x: jq.normalize_code(x)).tolist()
    # Holding info for both connect links (310001/310002) over the window.
    df = finance.run_query(Query(finance.STK_HK_HOLD_INFO).filter(
        or_(finance.STK_HK_HOLD_INFO.link_id == i for i in [310001, 310002]),
        or_(finance.STK_HK_HOLD_INFO.code == i for i in stocks),
        finance.STK_HK_HOLD_INFO.day >= date_id
    ))[['day', 'name', 'share_ratio']]
    df['day'] = df['day'].map(lambda x: datetime.strftime(x, "%Y%m%d"))
    # Link calendar, used below to drop non-trading days.
    trade_day = finance.run_query(Query(finance.STK_EXCHANGE_LINK_CALENDAR).filter(
        finance.STK_EXCHANGE_LINK_CALENDAR.day >= date_id,
        or_(finance.STK_EXCHANGE_LINK_CALENDAR.link_id == i for i in [310001, 310002]))
    )[['day', 'type']].drop_duplicates()
    trade_day['day'] = trade_day['day'].map(lambda x: datetime.strftime(x, "%Y%m%d"))
    merge_df = df.merge(trade_day, on='day', how='left')
    # Keep only rows flagged as a normal trading day.
    merge_df = merge_df[merge_df['type'] == '正常交易日']
    del merge_df['type']
    db.write(merge_df, 'tail_northup', mode='w')
def get(self, ident):
    """Fetch by primary key, honouring the "public" criterion.

    The object is returned only when it passes the ``property_name``
    predicate (or does not define it at all); otherwise ``None``.
    """
    prop = self.property_name
    if self._criterion:
        mapper = self._only_full_mapper_zero("get")
        # Don't use getattr/hasattr to check public existence, since this
        # might misinterpret a bug (AttributeError raised by some code in
        # property implementation) as missing attribute and cause all
        # private data going to public.
        if prop in dir(mapper.class_):
            crit = getattr(mapper.class_, prop)
            if crit is not None:
                if not isinstance(crit, ClauseElement):
                    # This simplest safe way to make bare boolean column
                    # accepted as expression.
                    crit = cast(crit, Boolean)
                if crit != self._criterion:
                    # We can't verify that criterion is from our private()
                    # call. Check from DB instead of looking in identity
                    # map.
                    assert False  # XXX temporal to verify it's used
                    return Query.get(self.populate_existing(), ident)
        assert False  # XXX temporal to verify it's used
    obj = Query.get(self, ident)
    # Only expose the object when it has no predicate or the predicate
    # evaluates truthy on the instance.
    if obj is not None and (prop not in dir(obj) or getattr(obj, prop)):
        return obj
def apply_pagination(self, query: Query, page: Optional[int],
                     page_size: Optional[int]) -> Query:
    """Page *query* with OFFSET/LIMIT.

    ``page`` is zero-based; page 0 (or ``None``) applies no offset.
    When ``page_size`` is falsy the query is returned untouched.
    """
    if page and page_size:
        skip = page * page_size
        query = query.offset(skip)
    if page_size:
        query = query.limit(page_size)
    return query
def scorecard(self):
    """Build the weekly scorecard query.

    A subquery scores every component per game (restricted to
    ``self.gids``); points per component are fixed multipliers baked in
    below.  Players are outer-joined so everyone appears even without a
    game; results are ordered by total descending, ties by game start.
    """
    # Per-game score components; each predicate helper yields 0/1 which is
    # scaled by the component's point value.
    sc = Query([
        Game.gid,
        Game.player_id,
        type_coerce(self._XL(10) * 10, Integer).label("xl"),
        type_coerce(self._win() * 15, Integer).label("win"),
        type_coerce(self._realtime(6000) * 20, Integer).label("time"),
        type_coerce(self._turncount("Dis:1", 30000) * 20, Integer).label("turns"),
        type_coerce(self._rune("Slime:5") * 10, Integer).label("slimy"),
        type_coerce(self._rune("Vaults:3") * 10, Integer).label("silver"),
        type_coerce(self._rune("Dis:2") * 10, Integer).label("iron"),
        type_coerce(self._rune("Tar:2") * 10, Integer).label("bone"),
        type_coerce(self._rune("Geh:2") * 10, Integer).label("obsidian"),
        type_coerce(self._rune("Coc:2") * 10, Integer).label("icy"),
        type_coerce(self._rune("Pan") * 20, Integer).label("pan"),
        type_coerce(self._god("Qazlal") * 6, Integer).label("qaz"),
        type_coerce(self._god("Jiyva") * 6, Integer).label("jiyva"),
        type_coerce(self._god("Lugonu") * 6, Integer).label("lucy"),
        type_coerce(self._god("Cheibriados") * 6, Integer).label("chei"),
    ]).filter(Game.gid.in_(self.gids)).subquery()
    return Query([Player, Game]).select_from(Player).outerjoin(
        Game, Game.gid == sc.c.gid).add_columns(
        sc.c.xl, sc.c.win, sc.c.time, sc.c.turns, sc.c.slimy,
        sc.c.silver, sc.c.iron, sc.c.bone, sc.c.obsidian, sc.c.icy,
        sc.c.pan, sc.c.qaz, sc.c.chei, sc.c.lucy, sc.c.jiyva,
        # subtotal counts only xl + win; total sums every component.
        func.max(sc.c.xl + sc.c.win).label("subtotal"),
        func.max(sc.c.xl + sc.c.win + sc.c.time + sc.c.turns
                 + sc.c.slimy + sc.c.silver + sc.c.iron + sc.c.bone
                 + sc.c.obsidian + sc.c.icy + sc.c.pan + sc.c.qaz
                 + sc.c.chei + sc.c.lucy + sc.c.jiyva).label("total")).group_by(
        sc.c.player_id).order_by(desc("total"), Game.start)
def _authorize_query(query: Query) -> Optional[Query]:
    """Authorize an existing query using the oso context attached to its
    session; returns ``None`` when the session is not an authorized one."""
    session = query.session
    # Only sessions built through the authorized-session machinery carry
    # an oso context; anything else is left untouched.
    if not isinstance(session, AuthorizedSessionBase):
        return None

    ctx = session.oso_context
    oso = ctx["oso"]
    user = ctx["user"]
    action = ctx["action"]

    # TODO (dhatch): Queries that already carry a LIMIT or OFFSET reject
    # further filters by default; disabling assertions lets us authorize
    # them anyway, at the risk of producing subtly incorrect queries.
    # Remove this if possible.
    query = query.enable_assertions(False)

    for entity in {col["entity"] for col in query.column_descriptions}:
        # Columns that are not mapper entities carry nothing to authorize.
        if entity is None:
            continue
        auth_filter = authorize_model(oso, user, action, query.session, entity)
        if auth_filter is not None:
            query = query.filter(auth_filter)

    return query
def test_query_column_name(self):
    """Geometry columns must be wrapped in AsBinary in SELECT (but not in
    WHERE), with table names/aliases preserved, unless the RAW attribute
    is used."""
    # test for bug: http://groups.google.com/group/geoalchemy/browse_thread/thread/6b731dd1673784f9
    from sqlalchemy.orm.query import Query
    query = Query(Road.road_geom).filter(Road.road_geom == '..').__str__()
    ok_('AsBinary(roads.road_geom)' in query,
        'table name is part of the column expression (select clause)')
    ok_('WHERE Equals(roads.road_geom' in query,
        'table name is part of the column expression (where clause)')
    query_wkb = Select([Road.road_geom]).where(
        Road.road_geom == 'POINT(0 0)').__str__()
    ok_('SELECT AsBinary(roads.road_geom)' in query_wkb, 'AsBinary is added')
    ok_('WHERE Equals(roads.road_geom' in query_wkb,
        'AsBinary is not added in where clause')
    # test for RAW attribute
    query_wkb = Select([Road.road_geom.RAW]).__str__()
    ok_('SELECT roads.road_geom' in query_wkb, 'AsBinary is not added')
    ok_(session.query(Road.road_geom.RAW).first())
    query_srid = Query(func.SRID(Road.road_geom.RAW))
    ok_('SRID(roads.road_geom)' in query_srid.__str__(),
        'AsBinary is not added')
    ok_(session.scalar(query_srid))
    # Wrapped in AsBinary the SRID cannot be extracted; RAW makes it work.
    eq_(session.scalar(
        Select([func.SRID(Spot.spot_location)]).where(Spot.spot_id == 1)),
        None,
        'AsBinary is added and the SRID is not returned')
    eq_(str(session.scalar(
        Select([func.SRID(Spot.spot_location.RAW)]).where(Spot.spot_id == 1))),
        '4326',
        'AsBinary is not added and the SRID is returned')
    # Aliased tables must carry the alias through to the rendered SQL.
    spot_alias = aliased(Spot)
    query_wkt = Select([func.wkt(spot_alias.spot_location.RAW)]).__str__()
    ok_('SELECT wkt(spots_1.spot_location' in query_wkt,
        'Table alias is used in select clause')
    ok_('FROM spots AS spots_1' in query_wkt,
        'Table alias is used in from clause')
def get(self, ident):
    """Fetch by primary key, honouring the "public" criterion.

    The object is returned only when it passes the ``property_name``
    predicate (or does not define it at all); otherwise ``None``.
    """
    prop = self.property_name
    if self._criterion:  # pragma: no cover
        mapper = self._only_full_mapper_zero("get")
        # Don't use getattr/hasattr to check public existence, since this
        # might misinterpret a bug (AttributeError raised by some code in
        # property implementation) as missing attribute and cause all
        # private data going to public.
        if prop in dir(mapper.class_):
            crit = getattr(mapper.class_, prop)
            if crit is not None:
                if not isinstance(crit, ClauseElement):
                    # This simplest safe way to make bare boolean column
                    # accepted as expression.
                    crit = cast(crit, Boolean)
                if crit != self._criterion:
                    # We can't verify that criterion is from our private()
                    # call. Check from DB instead of looking in identity
                    # map.
                    assert False  # XXX temporal to verify it's used
                    return Query.get(self.populate_existing(), ident)
        assert False  # XXX temporal to verify it's used
    obj = Query.get(self, ident)
    # Only expose the object when it has no predicate or the predicate
    # evaluates truthy on the instance.
    if obj is not None and (prop not in dir(obj) or getattr(obj, prop)):
        return obj
def element_atomic_weight(self, zeq, reference=None):
    """Return the atomic weight of the element identified by *zeq*,
    resolved against *reference*; raises ValueError when unknown."""
    z = self._get_z(zeq)
    q = (Query(ElementAtomicWeightProperty.value)
         .join(Element)
         .filter(Element.z == z))
    not_found = ValueError('Unknown atomic weight for z="{0}" and '
                           'reference="{1}"'.format(z, reference))
    return self._query_with_references(q, not_found, reference)
def apply_filter(query: Query, comparator: ComparisonFunc[common.T],
                 arg: common.Filterable[common.T]) -> Query:
    """Filter *query* by *arg* via *comparator*.

    ``None`` leaves the query untouched; a non-string iterable becomes an
    ``IN`` filter on the comparator's column; anything else is compared
    directly.
    """
    if arg is None:
        return query
    is_collection = isinstance(arg, Iterable) and not isinstance(arg, str)
    if is_collection:
        # comparator is a bound column method; __self__ is the column.
        return query.filter(comparator.__self__.in_(arg))
    return query.filter(comparator(arg))
def element_mass_density_kg_per_m3(self, zeq, reference=None):
    """Return the mass density (kg/m3) of the element identified by *zeq*,
    resolved against *reference*; raises ValueError when unknown."""
    z = self._get_z(zeq)
    q = (Query(ElementMassDensityProperty.value_kg_per_m3)
         .join(Element)
         .filter(Element.z == z))
    not_found = ValueError('Unknown mass density for z="{0}" and '
                           'reference="{1}"'.format(z, reference))
    return self._query_with_references(q, not_found, reference)
def __iter__(self): """override __iter__ to pull results from Beaker if particular attributes have been configured. """ if hasattr(self, '_cache_parameters'): return self.get_value(createfunc=lambda: list(Query.__iter__(self))) else: return Query.__iter__(self)
def apply(self, query: Query, value: Any) -> Query:
    """Restrict the dashboard list to what the current user may see.

    Admins see everything; "custom"-role users see only dashboards they
    own; everyone else sees dashboards they own, published dashboards
    whose slices they can access, or dashboards they favourited.
    """
    user_roles = [role.name.lower() for role in list(get_user_roles())]
    if "admin" in user_roles:
        return query
    datasource_perms = security_manager.user_view_menu_names("datasource_access")
    schema_perms = security_manager.user_view_menu_names("schema_access")
    # Published dashboards with at least one slice the user can access.
    published_dash_query = (
        db.session.query(Dashboard.id)
        .join(Dashboard.slices)
        .filter(
            and_(
                Dashboard.published == True,  # pylint: disable=singleton-comparison
                or_(
                    Slice.perm.in_(datasource_perms),
                    Slice.schema_perm.in_(schema_perms),
                    security_manager.can_access_all_datasources(),
                ),
            )
        )
    )
    # Dashboards the current user has starred.
    users_favorite_dash_query = db.session.query(FavStar.obj_id).filter(
        and_(
            FavStar.user_id == security_manager.user_model.get_user_id(),
            FavStar.class_name == "Dashboard",
        )
    )
    # Dashboards owned by the current user.
    owner_ids_query = (
        db.session.query(Dashboard.id)
        .join(Dashboard.owners)
        .filter(
            security_manager.user_model.id
            == security_manager.user_model.get_user_id()
        )
    )
    if "custom" in user_roles:
        # "custom" role: ownership only, no published/favourite access.
        query = query.filter(
            and_(
                Dashboard.id.in_(owner_ids_query),
            )
        )
        return query
    query = query.filter(
        or_(
            Dashboard.id.in_(owner_ids_query),
            Dashboard.id.in_(published_dash_query),
            Dashboard.id.in_(users_favorite_dash_query),
        )
    )
    return query
def apply(self, query: Query, value: Any) -> Query:
    """Filter *query* by whether rows are in the current user's favourites.

    Truthy *value* keeps only favourited rows; falsy keeps the rest.
    Anonymous users get the query back unfiltered.
    """
    # If anonymous user filter nothing
    if security_manager.current_user is None:
        return query
    favorites = db.session.query(FavStar.obj_id).filter(
        and_(FavStar.user_id == g.user.id,
             FavStar.class_name == self.class_name))
    membership = self.model.id.in_(favorites)
    if value:
        return query.filter(and_(membership))
    return query.filter(and_(~membership))
def apply_inner_select_joins(
    self,
    query: Query,
    select_columns: List[str] = None,
    aliases_mapping: Dict[str, AliasedClass] = None,
) -> Query:
    """
    Add select load options to query.
    The goal is to only SQL select what is requested and join all the necessary
    models when dotted notation is used. Inner implies non dotted columns
    and many to one and one to one

    :param query: query to extend
    :param select_columns: column names; dotted ones trigger joins
    :param aliases_mapping: relation-name -> aliased class mapping
    :return: the query with joins and load options applied
    """
    if not select_columns:
        return query
    # Relations already joined in this call, so each is joined only once.
    joined_models = list()
    for column in select_columns:
        if is_column_dotted(column):
            root_relation = get_column_root_relation(column)
            leaf_column = get_column_leaf(column)
            if self.is_relation_many_to_one(
                root_relation
            ) or self.is_relation_one_to_one(root_relation):
                if root_relation not in joined_models:
                    query = self._query_join_relation(
                        query, root_relation, aliases_mapping=aliases_mapping
                    )
                    query = query.add_entity(
                        self.get_alias_mapping(root_relation, aliases_mapping)
                    )
                    # Add relation FK to avoid N+1 performance issue
                    query = self._apply_relation_fks_select_options(
                        query, root_relation
                    )
                    joined_models.append(root_relation)
                related_model_ = self.get_alias_mapping(
                    root_relation, aliases_mapping
                )
                relation = getattr(self.obj, root_relation)
                # The Zen of eager loading :(
                # https://docs.sqlalchemy.org/en/13/orm/loading_relationships.html
                query = query.options(
                    contains_eager(relation.of_type(related_model_)).load_only(
                        leaf_column
                    )
                )
                query = query.options(Load(related_model_).load_only(leaf_column))
        else:
            query = self._apply_normal_col_select_option(query, column)
    return query
def element_name(self, zeq, language='en', reference=None):
    """Return the name of the element identified by *zeq* in *language*,
    resolved against *reference*; raises ValueError when unknown."""
    z = self._get_z(zeq)
    q = (Query(ElementNameProperty.name)
         .filter(ElementNameProperty.language_code == language)
         .join(Element)
         .filter(Element.z == z))
    not_found = ValueError('Unknown name for z="{0}", '
                           'language="{1}" and '
                           'reference="{2}"'
                           .format(z, language, reference))
    return self._query_with_references(q, not_found, reference)
def _lookup_spot_comment_by_spot_gathering_type_unique_id(
        req: Request, resp: Response, query: Query, *args, **kwargs):
    """Filter spot comments by the gathering-type unique id in *kwargs*,
    joining Spot first when the query has not joined it yet.

    NOTE(review): relies on the private ``Query._join_entities`` attribute
    (pre-1.4 SQLAlchemy internal — confirm compatibility on upgrade).
    """
    # noinspection PyProtectedMember
    if alchemyMapping.Spot not in [
        mapper.entity for mapper in query._join_entities
    ]:
        query = query.join(
            alchemyMapping.Spot,
            alchemyMapping.SpotComment.spot_angler_spot_id ==
            alchemyMapping.Spot.spot_angler_spot_id)
    return query.filter(alchemyMapping.Spot.spot_gathering_type_unique_id ==
                        kwargs['spot_gathering_type_unique_id'])
def apply(self, query: Query, value: bool) -> Query:
    """Filter tables on whether their ``extra`` JSON carries a
    certification entry; non-boolean *value* leaves the query untouched."""
    pattern = '%"certification":%'
    if value is True:
        return query.filter(SqlaTable.extra.ilike(pattern))
    if value is False:
        # NULL extras count as "not certified" too.
        uncertified = or_(
            SqlaTable.extra.notlike(pattern),
            SqlaTable.extra.is_(None),
        )
        return query.filter(uncertified)
    return query
def __init__ (self, entities, session):
    """Initialize the object.

    self.start and self.end will be used in case there are temporal
    limits for the query (useful to produce TimeSeries objects, which
    need those).
    """
    # No temporal bounds by default.
    self.start = None
    self.end = None
    Query.__init__(self, entities, session)
def __iter__(self): """override __iter__ to pull results from Beaker if particular attributes have been configured. """ if hasattr(self, '_cache_parameters'): cache, cache_key = _get_cache_parameters(self) ret = cache.get_value(cache_key, createfunc=lambda: list(Query.__iter__(self))) # merge the result in. return self.merge_result(ret, load=False) else: return Query.__iter__(self)
def test_underscore_update_many_query(self, mock_clean):
    """_update_many must clean the incoming query, call ``update`` on the
    cleaned query with fetch-synchronisation, and return its result."""
    from sqlalchemy.orm.query import Query
    items = Query('asd')
    clean_items = Query("ASD")
    clean_items.all = Mock(return_value=[1, 2, 3])
    clean_items.update = Mock()
    mock_clean.return_value = clean_items
    count = docs.BaseMixin._update_many(items, {'foo': 'bar'})
    mock_clean.assert_called_once_with(items)
    clean_items.update.assert_called_once_with(
        {'foo': 'bar'}, synchronize_session='fetch')
    # The returned count is whatever the underlying update() reported.
    assert count == clean_items.update()
def _lookup_spot_bait_total_fish_caught_by_spot_gathering_type(
        req: Request, resp: Response, query: Query, *args, **kwargs):
    """Filter bait totals by the gathering type in *kwargs*, joining Spot
    first when the query has not joined it yet.

    NOTE(review): relies on the private ``Query._join_entities`` attribute
    (pre-1.4 SQLAlchemy internal — confirm compatibility on upgrade).
    """
    # noinspection PyProtectedMember
    if alchemyMapping.Spot not in [
        mapper.entity for mapper in query._join_entities
    ]:
        query = query.join(
            alchemyMapping.Spot,
            alchemyMapping.SpotBaitTotalFishCaught.spot_angler_spot_id ==
            alchemyMapping.Spot.spot_angler_spot_id)
    return query.filter(alchemyMapping.Spot.spot_gathering_type ==
                        kwargs['spot_gathering_type'])
def overview():
    """Build the contest overview: one row per contestant with each week's
    total (``wk<N>``) and the sum of all weeks as ``grandtotal``, ordered
    by grand total descending."""
    q = Query(CsdcContestant)
    weekly_totals = []
    for wk in weeks:
        sc = wk.scorecard().subquery()
        # Missing weeks count as zero toward the grand total.
        weekly_totals.append(func.ifnull(sc.c.total, 0))
        q = q.outerjoin(
            sc, CsdcContestant.player_id == sc.c.player_id
        ).add_column(sc.c.total.label("wk" + wk.number))
    grand = sum(weekly_totals).label("grandtotal")
    return q.add_column(grand).order_by(desc("grandtotal"))
def add_coin(self, data):
    """Insert coin symbols from *data* that are not already stored.

    :param data: iterable of coin symbol strings (whitespace is stripped).

    Fixes over the original: the discarded ``query.add_columns(...)``
    call is removed (``Query.add_columns`` is generative, so it was a
    no-op), the empty ``pass``/``else`` branch is inverted, membership is
    tested against a set directly, and duplicate symbols within one batch
    are now inserted only once.
    """
    query = Query(CoinDimRow, session=self.DBSession())
    existing = {coin.trade_symbol for coin in query.all()}
    for coin in data:
        symbol = coin.strip()
        if symbol not in existing:
            row = CoinDimRow()
            row.trade_symbol = symbol
            self.DBSession.add(row)
            # Remember it so a repeated symbol in *data* is not re-added.
            existing.add(symbol)
    self.DBSession.commit()
def get_query(self, session: Session, *args) -> Query:
    """
    Return a Query object initialized with our model (or with *args*
    when explicit entities are given).

    :param session: Session object
    :return: Query object
    :raises ValueError: when *session* is None
    """
    if session is None:
        raise ValueError('invalid session object')
    # https://docs.sqlalchemy.org/en/latest/orm/query.html#the-query-object
    entities = args if args else (self.model,)
    return Query(*entities, session=session)
def build_entity_query(self):
    """
    Builds a :class:`sqla:sqlalchemy.orm.query.Query` object for this
    entity (an instance of :class:`sir.schema.searchentities.SearchEntity`)
    that eagerly loads the values of all search fields.

    :rtype: :class:`sqla:sqlalchemy.orm.query.Query`
    """
    root_model = self.model
    query = Query(root_model)
    paths = [field.paths for field in self.fields]
    if (config.CFG.getboolean("sir", "wscompat")
            and self.extrapaths is not None):
        paths.extend([self.extrapaths])
    # Merged trie of all paths, used to know which columns each
    # relationship level actually needs.
    merged_paths = merge_paths(paths)
    for field_paths in paths:
        for path in field_paths:
            current_merged_path = merged_paths
            model = root_model
            load = Load(model)
            split_path = path.split(".")
            for pathelem in split_path:
                current_merged_path = current_merged_path[pathelem]
                column = getattr(model, pathelem)
                prop = column.property
                if isinstance(prop, RelationshipProperty):
                    pk = column.mapper.primary_key[0].name
                    # Choose the loader strategy by relationship direction.
                    if prop.direction == ONETOMANY:
                        load = load.subqueryload(pathelem)
                    elif prop.direction == MANYTOONE:
                        load = load.joinedload(pathelem)
                    else:
                        load = load.defaultload(pathelem)
                    # NOTE(review): ``.keys()`` then ``.append`` implies a
                    # list — Python 2 semantics; on Python 3 this would
                    # need ``list(...)``.  Confirm target runtime.
                    required_columns = current_merged_path.keys()
                    required_columns.append(pk)
                    # Get the mapper class of the current element of the path so
                    # the next iteration can access it.
                    model = prop.mapper.class_
            logger.debug("Loading only %s on %s", required_columns, model)
            load = defer_everything_but(class_mapper(model), load,
                                        *required_columns)
            query = query.options(load)
    return query
def apply(self, query: Query, value: Any) -> Query:
    """Filter dashboards on whether ``certified_by`` is set; a non-boolean
    *value* leaves the query untouched."""
    if value is True:
        certified = and_(
            Dashboard.certified_by.isnot(None),
            Dashboard.certified_by != "",
        )
        return query.filter(certified)
    if value is False:
        # NULL and empty string both count as "not certified".
        uncertified = or_(
            Dashboard.certified_by.is_(None),
            Dashboard.certified_by == "",
        )
        return query.filter(uncertified)
    return query
def __iter__(self):
    """Serve results from the module-level cache when ``cachekey`` is set,
    merging cached instances back into the current session."""
    if not hasattr(self, 'cachekey'):
        return Query.__iter__(self)
    try:
        cached = _cache[self.cachekey]
    except KeyError:
        cached = list(Query.__iter__(self))
        # Detach the instances so the cache does not pin them to this
        # session.
        for obj in cached:
            self.session.expunge(obj)
        _cache[self.cachekey] = cached
    return iter(self.session.merge(obj, dont_load=True) for obj in cached)
def __init__(self, attr, state):
    # Query the relationship's target mapper; no session is bound here
    # (presumably supplied later by the caller — confirm).
    Query.__init__(self, attr.target_mapper, None)
    self.instance = instance = state.obj()
    self.attr = attr
    mapper = object_mapper(instance)
    # NOTE(review): ``resolve_synonyms`` and ``alias_secondary`` are
    # old-SQLAlchemy API — confirm the pinned version on upgrade.
    prop = mapper.get_property(self.attr.key, resolve_synonyms=True)
    # Criterion restricting rows to children of this parent instance.
    self._criterion = prop.compare(operators.eq,
                                   instance,
                                   value_is_parent=True,
                                   alias_secondary=False)
    if self.attr.order_by:
        self._order_by = self.attr.order_by
def __init__(self, attr, state):
    # Query the relationship's target mapper; no session is bound here
    # (presumably supplied later by the caller — confirm).
    Query.__init__(self, attr.target_mapper, None)
    self.instance = instance = state.obj()
    self.attr = attr
    mapper = object_mapper(instance)
    # NOTE(review): ``resolve_synonyms`` and ``alias_secondary`` are
    # old-SQLAlchemy API — confirm the pinned version on upgrade.
    prop = mapper.get_property(self.attr.key, resolve_synonyms=True)
    # Criterion restricting rows to children of this parent instance.
    self._criterion = prop.compare(
        operators.eq, instance,
        value_is_parent=True,
        alias_secondary=False)
    if self.attr.order_by:
        self._order_by = self.attr.order_by
def test_underscore_delete_many_query(
        self, mock_clean, mock_on_bulk):
    """_delete_many must clean the query, call ``delete`` without
    synchronisation, fire the bulk hook with the items, and return the
    delete count."""
    from sqlalchemy.orm.query import Query
    items = Query('asd')
    clean_items = Query("ASD")
    clean_items.all = Mock(return_value=[1, 2, 3])
    clean_items.delete = Mock()
    mock_clean.return_value = clean_items
    count = docs.BaseMixin._delete_many(items)
    mock_clean.assert_called_once_with(items)
    clean_items.delete.assert_called_once_with(
        synchronize_session=False)
    # The bulk hook receives the previously fetched items.
    mock_on_bulk.assert_called_once_with(
        docs.BaseMixin, [1, 2, 3], None)
    assert count == clean_items.delete()
def __iter__(self):
    """Serve results from a per-session cache when ``cachekey`` is set;
    otherwise run the query normally."""
    if not hasattr(self, 'cachekey'):
        return Query.__iter__(self)
    # Lazily create the cache dict on the session.
    if not hasattr(self.session, '_cache'):
        self.session._cache = {}
    cache = self.session._cache
    try:
        results = cache[self.cachekey]
    except KeyError:
        results = list(Query.__iter__(self))
        cache[self.cachekey] = results
    return iter(results)
def __iter__(self):
    # Log the SQL, its EXPLAIN plan, and how long execution took, then
    # yield the normal query results.
    # NOTE(review): ``unicode`` implies Python 2 — confirm target runtime.
    log.info("Query:\n\t%s" % unicode(self).replace("\n", "\n\t"))
    explain = self.session.execute(Explain(self)).fetchall()
    # One tab-indented line per plan row, columns pipe-separated.
    text = "\n\t".join("|".join(str(x) for x in line) for line in explain)
    before = time()
    result = Query.__iter__(self)
    log.info("Query Time: %0.3f Explain Query Plan:\n\t%s"
             % (time() - before, text))
    return result
def __iter__(self): """override __iter__ to pull results from dogpile if particular attributes have been configured. Note that this approach does *not* detach the loaded objects from the current session. If the cache backend is an in-process cache (like "memory") and lives beyond the scope of the current session's transaction, those objects may be expired. The method here can be modified to first expunge() each loaded item from the current session before returning the list of items, so that the items in the cache are not the same ones in the current Session. """ if hasattr(self, '_cache_region'): return self.get_value(createfunc=lambda: list(Query.__iter__(self))) else: return Query.__iter__(self)
def __iter__(self):
    # Log the SQL, its EXPLAIN plan, and how long execution took, then
    # yield the normal query results.
    # NOTE(review): ``unicode`` implies Python 2 — confirm target runtime.
    log.info('Query:\n\t%s' % unicode(self).replace('\n', '\n\t'))
    explain = self.session.execute(Explain(self)).fetchall()
    # One tab-indented line per plan row, columns pipe-separated.
    text = '\n\t'.join('|'.join(str(x) for x in line) for line in explain)
    before = time()
    result = Query.__iter__(self)
    log.info('Query Time: %0.3f Explain Query Plan:\n\t%s'
             % (time() - before, text))
    return result
def filter_by(me, **kargs):
    """ A.query().filter_by( x=4, **{'b.c.d':5, ...})

    Dotted keyword names join through intermediate relations before
    filtering on the final attribute; the join point is reset after each
    keyword so the filters are independent.
    NOTE(review): ``iteritems`` implies Python 2 — confirm target runtime.
    """
    r = me
    for k, v in kargs.iteritems():
        attrs = k.split(".")
        if attrs[:-1]:
            # All but the last component are relations to join through.
            r = r.join(attrs[:-1])
        r = _saQuery.filter_by(r, **{attrs[-1]: v})
        r = r.reset_joinpoint()
    return r
def __init__(self, entities, *args, **kwargs):
    """Construct the query and, for each entity class that defines a
    ``public`` attribute, splice that attribute in as the query's
    criterion so "private" rows are filtered out by default."""
    Query.__init__(self, entities, *args, **kwargs)
    for entity in entities:
        if hasattr(entity, 'parententity'):
            # Mapped attributes carry their owning entity here.
            entity = entity.parententity
        try:
            cls = _class_to_mapper(entity).class_
        except AttributeError:
            # XXX For tables, table columns
            pass
        else:
            crit = getattr(cls, 'public', None)
            if crit is not None:
                if not isinstance(crit, ClauseElement):
                    # This simplest safe way to make bare boolean column
                    # accepted as expression.
                    crit = cast(crit, Boolean)
                # Filter through the normal API, then adopt the resulting
                # criterion as our own.
                query = self.filter(crit)
                self._criterion = query._criterion
def addIfNotThere( self, table, params ):
    '''
    Using the PrimaryKeys of the table, it looks for the record in the
    database. If it is not there, it is inserted as a new entry.

    :param table: table where to add or modify
    :type table: str
    :param params: dictionary of what to add or modify
    :type params: dict

    :return: S_OK() || S_ERROR()
    '''
    session = self.sessionMaker_o()
    # Resolve the mapped class named *table* from this module.
    table_c = getattr(__import__(__name__, globals(), locals(), [table]), table)
    primaryKeys = [key.name for key in class_mapper(table_c).primary_key]
    try:
        select = Query(table_c, session = session)
        # NOTE(review): ``iteritems``/``basestring`` imply Python 2 —
        # confirm target runtime.
        for columnName, columnValue in params.iteritems():
            # Only primary-key columns with truthy values narrow the lookup.
            if not columnValue or columnName not in primaryKeys:
                continue
            column_a = getattr(table_c, columnName.lower())
            if isinstance(columnValue, (list, tuple)):
                select = select.filter(column_a.in_(list(columnValue)))
            elif isinstance(columnValue, basestring):
                select = select.filter(column_a == columnValue)
            else:
                # Unsupported value type: log and skip this column.
                self.log.error("type(columnValue) == %s" %type(columnValue))
        res = select.first()  # the selection is done via primaryKeys only
        if not res:  # if not there, let's insert it
            return self.insert(table, params)
        session.commit()
        return S_OK()
    except exc.SQLAlchemyError as e:
        session.rollback()
        self.log.exception( "addIfNotThere: unexpected exception", lException = e )
        return S_ERROR( "addIfNotThere: unexpected exception %s" % e )
    finally:
        session.close()
def sort(self, query: Query):
    """Order *query* by the ``sort_by`` request parameter when present,
    falling back to the model's default sort column; returns the query
    unchanged when neither resolves."""
    params = self.params
    if 'sort_by' in params:
        sort_by = params['sort_by']
        # Unknown attribute names resolve to None and skip sorting.
        sort_column = getattr(self.model, sort_by, None)
    else:
        sort_column = self.get_default_sort_column()
    if sort_column is not None:
        # NOTE(review): the ``*`` unpack implies sort_column is iterable
        # (a tuple of columns?) — a bare model attribute from getattr
        # above would not be. Confirm the model's convention.
        return query.order_by(*sort_column)
    else:
        return query
def __iter__(self):
    """Iterate the query results, retrying up to ``self._retry`` times on
    SQLAlchemyError.

    Each failed attempt rolls back the session before retrying; once the
    retry budget is exhausted the last error propagates.

    Fix over the original: re-raise with a bare ``raise`` instead of
    ``raise e`` so the original traceback is preserved.
    """
    tries = self._retry
    while True:
        try:
            results = list(Query.__iter__(self))
            break
        except SQLAlchemyError:
            if tries:
                # Roll back the failed transaction and try again.
                self.session.rollback()
                tries -= 1
                continue
            raise
    return iter(results)
def __repr__(self):
    """Return the query period ("ever" for an open bound) followed by the
    underlying SQL.

    Fix over the original: the local variable no longer shadows the
    builtin ``repr``; the if/else ladders collapse to conditionals.
    """
    start = self.start.isoformat() if self.start is not None else "ever"
    end = self.end.isoformat() if self.end is not None else "ever"
    text = "SCMQuery from %s to %s\n" % (start, end)
    text += Query.__str__(self)
    return text
def filter_by_subattr(me, _subattr, **kargs):
    """ A.query().filter( A.b.has( cod= b1.cod)).filter( A.c.has( cod= c1.cod))
    == A.query().join( A.b).filter_by( cod= b1.cod).join( A.c).filter_by( cod= c1.cod))
    -> A.query().filter_by_subattr( 'cod', b=b1, c=c1)
    used for hiding .obj_id, similar to .dbid being hidden in A.ptrB == b1,
    e.g. Pozicia.query( lambda self: self.firma.obj_id == f1.obj_id)
    -> Pozicia.query().filter_by_obj_id( firma = f1)

    NOTE(review): ``iteritems`` implies Python 2 — confirm target runtime.
    """
    r = me
    for k, v in kargs.iteritems():
        # Join through the (possibly dotted) relation, compare the
        # sub-attribute, then reset the join point for the next keyword.
        r = r.join(k.split("."))
        r = _saQuery.filter_by(r, **{_subattr: getattr(v, _subattr)})
        r = r.reset_joinpoint()
    return r
def __iter__(self):
    """Serve results from a per-session cache keyed by the compiled SQL
    plus its bound parameters."""
    # Lazily create the cache dict on the session.
    if not hasattr(self.session, '_cache'):
        self.session._cache = {}
    cache = self.session._cache
    compiled = self.statement.compile()
    params = compiled.params
    params.update(self._params)
    cachekey = str(compiled) + str(params)
    try:
        results = cache[cachekey]
    except KeyError:
        results = list(Query.__iter__(self))
        cache[cachekey] = results
    return iter(results)
def _set_entities(self, entities, entity_wrapper=None):
    # Delegate to SQLAlchemy's implementation, then record that the
    # entity list has been (re)configured since construction.
    _SAQuery._set_entities(self, entities, entity_wrapper)
    self.__entities_modified = True
def __init__(self, attr, state):
    # Build a query against the relationship attribute's target mapper
    # with no session bound yet (presumably bound later — confirm with
    # callers).
    Query.__init__(self, attr.target_mapper, None)
    self.instance = state.obj()  # parent object owning the attribute
    self.attr = attr