def __determine_targets(self):
    if isinstance(self.argument, type):
        self.mapper = mapper.class_mapper(self.argument, entity_name=self.entity_name, compile=False)
    elif isinstance(self.argument, mapper.Mapper):
        self.mapper = self.argument
    elif callable(self.argument):
        # accept a callable to suit various deferred-configurational schemes
        self.mapper = mapper.class_mapper(self.argument(), entity_name=self.entity_name, compile=False)
    else:
        raise exceptions.ArgumentError("relation '%s' expects a class or a mapper argument (received: %s)" % (self.key, type(self.argument)))

    if not self.parent.concrete:
        for inheriting in self.parent.iterate_to_root():
            if inheriting is not self.parent and inheriting._get_property(self.key, raiseerr=False):
                util.warn(
                    ("Warning: relation '%s' on mapper '%s' supercedes "
                     "the same relation on inherited mapper '%s'; this "
                     "can cause dependency issues during flush") %
                    (self.key, self.parent, inheriting))

    self.target = self.mapper.mapped_table
    self.table = self.mapper.mapped_table

    if self.cascade.delete_orphan:
        if self.parent.class_ is self.mapper.class_:
            raise exceptions.ArgumentError("In relationship '%s', can't establish 'delete-orphan' cascade "
                "rule on a self-referential relationship. "
                "You probably want cascade='all', which includes delete cascading but not orphan detection." % (str(self)))
        self.mapper.primary_mapper().delete_orphans.append((self.key, self.parent.class_))
def _determine_targets(self):
    if isinstance(self.argument, type):
        self.mapper = mapper.class_mapper(self.argument, compile=False)._check_compile()
    elif isinstance(self.argument, mapper.Mapper):
        self.mapper = self.argument._check_compile()
    else:
        raise exceptions.ArgumentError(
            "relation '%s' expects a class or a mapper argument (received: %s)" %
            (self.key, type(self.argument)))

    # ensure the "select_mapper", if different from the regular target mapper, is compiled.
    self.mapper.get_select_mapper()._check_compile()

    if self.association is not None:
        if isinstance(self.association, type):
            self.association = mapper.class_mapper(self.association, compile=False)._check_compile()

    self.target = self.mapper.mapped_table
    self.select_mapper = self.mapper.get_select_mapper()
    self.select_table = self.mapper.select_table
    self.loads_polymorphic = self.target is not self.select_table

    if self.cascade.delete_orphan:
        if self.parent.class_ is self.mapper.class_:
            raise exceptions.ArgumentError(
                "In relationship '%s', can't establish 'delete-orphan' cascade rule on a "
                "self-referential relationship. You probably want cascade='all', which "
                "includes delete cascading but not orphan detection." % (str(self)))
        self.mapper.primary_mapper().delete_orphans.append((self.key, self.parent.class_))
def _get_target(self):
    if not hasattr(self, 'mapper'):
        if isinstance(self.argument, type):
            self.mapper = mapper.class_mapper(self.argument, compile=False)
        elif isinstance(self.argument, mapper.Mapper):
            self.mapper = self.argument
        elif util.callable(self.argument):
            # accept a callable to suit various deferred-configurational schemes
            self.mapper = mapper.class_mapper(self.argument(), compile=False)
        else:
            raise sa_exc.ArgumentError("relation '%s' expects a class or a mapper argument (received: %s)" % (self.key, type(self.argument)))
        assert isinstance(self.mapper, mapper.Mapper), self.mapper
    return self.mapper
async def setup(cls, db_url: str) -> "ORM":
    logger.debug(f"Setting up {cls.__name__}")
    # Mapping is done manually, and calling mapping() on an already-mapped class
    # raises an error, so only run the mapping when class_mapper() reports the
    # class as unmapped.
    try:
        class_mapper(Galleryinfo)
    except UnmappedClassError:
        cls.mapping()
    engine = create_async_engine(db_url)
    async with engine.begin() as connection:
        await connection.run_sync(mapper_registry.metadata.create_all, checkfirst=True)
    return cls(engine)
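A minimal calling sketch for the setup coroutine above, assuming it is exposed as a classmethod on the ORM class and that an async-capable driver URL is available (the aiosqlite URL here is only an example):

import asyncio

async def main():
    # hypothetical async database URL; any SQLAlchemy async driver URL works
    orm = await ORM.setup("sqlite+aiosqlite:///./gallery.db")
    # orm now wraps a configured AsyncEngine with the tables created
    ...

asyncio.run(main())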
def recognizes(object_path, obj=None):
    if not SQLAlchemyHandler.recognizes(object_path, obj=obj):
        return False
    from sqlalchemy.orm import class_mapper
    try:
        class_mapper(obj)
    except:
        # could raise InvalidRequestError or AttributeError or who knows what else
        return False
    else:
        return True
def __init__(self, class_or_mapper, session=None, entity_name=None, lockmode=None, with_options=None, extension=None, **kwargs):
    if isinstance(class_or_mapper, type):
        self.mapper = mapper.class_mapper(class_or_mapper, entity_name=entity_name)
    else:
        self.mapper = class_or_mapper.compile()
    self.with_options = with_options or []
    self.select_mapper = self.mapper.get_select_mapper().compile()
    self.always_refresh = kwargs.pop('always_refresh', self.mapper.always_refresh)
    self.lockmode = lockmode
    self.extension = mapper._ExtensionCarrier()
    if extension is not None:
        self.extension.append(extension)
    self.extension.append(self.mapper.extension)
    self.is_polymorphic = self.mapper is not self.select_mapper
    self._session = session
    if not hasattr(self.mapper, '_get_clause'):
        _get_clause = sql.and_()
        for primary_key in self.primary_key_columns:
            _get_clause.clauses.append(primary_key == sql.bindparam(primary_key._label, type=primary_key.type, unique=True))
        self.mapper._get_clause = _get_clause
    self._entities = []
    self._get_clause = self.mapper._get_clause
    self._order_by = kwargs.pop('order_by', False)
    self._group_by = kwargs.pop('group_by', False)
    self._distinct = kwargs.pop('distinct', False)
    self._offset = kwargs.pop('offset', None)
    self._limit = kwargs.pop('limit', None)
    self._criterion = None
    self._joinpoint = self.mapper
    self._from_obj = [self.table]
    for opt in util.flatten_iterator(self.with_options):
        opt.process_query(self)
def _post_init(self):
    if logging.is_info_enabled(self.logger):
        self.logger.info(str(self) + " setup primary join %s" % self.primaryjoin)
        self.logger.info(str(self) + " setup secondary join %s" % self.secondaryjoin)
        self.logger.info(str(self) + " synchronize pairs [%s]" % ",".join(["(%s => %s)" % (l, r) for l, r in self.synchronize_pairs]))
        self.logger.info(str(self) + " secondary synchronize pairs [%s]" % ",".join(["(%s => %s)" % (l, r) for l, r in self.secondary_synchronize_pairs or []]))
        self.logger.info(str(self) + " local/remote pairs [%s]" % ",".join(["(%s / %s)" % (l, r) for l, r in self.local_remote_pairs]))
        self.logger.info(str(self) + " relation direction %s" % self.direction)

    if self.uselist is None and self.direction is MANYTOONE:
        self.uselist = False

    if self.uselist is None:
        self.uselist = True

    if not self.viewonly:
        self._dependency_processor = dependency.create_dependency_processor(self)

    # primary property handler, set up class attributes
    if self.is_primary():
        # if a backref name is defined, set up an extension to populate
        # attributes in the other direction
        if self.backref is not None:
            self.attributeext = self.backref.get_extension()
        if self.backref is not None:
            self.backref.compile(self)
    elif not mapper.class_mapper(self.parent.class_, compile=False)._get_property(self.key, raiseerr=False):
        raise exceptions.ArgumentError("Attempting to assign a new relation '%s' to a non-primary mapper on class '%s'. New relations can only be added to the primary mapper, i.e. the very first mapper created for class '%s' " % (self.key, self.parent.class_.__name__, self.parent.class_.__name__))

    super(PropertyLoader, self).do_init()
class UOWProperty(attributes.InstrumentedAttribute):
    """Override ``InstrumentedAttribute`` to provide an extra
    ``AttributeExtension`` to all managed attributes as well as the
    `property` property.
    """

    def __init__(self, manager, class_, key, uselist, callable_, typecallable, cascade=None, extension=None, **kwargs):
        extension = util.to_list(extension or [])
        extension.insert(0, UOWEventHandler(key, class_, cascade=cascade))
        super(UOWProperty, self).__init__(manager, key, uselist, callable_, typecallable, extension=extension, **kwargs)
        self.class_ = class_

    property = property(
        lambda s: class_mapper(s.class_).props[s.key],
        doc="returns the MapperProperty object associated with this property")
def model_to_dict(obj, visited_children=None, back_relationships=None):
    if visited_children is None:
        visited_children = set()
    if back_relationships is None:
        back_relationships = set()
    serialized_data = {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}
    relationships = class_mapper(obj.__class__).relationships
    visitable_relationships = [(name, rel) for name, rel in relationships.items() if name not in back_relationships]
    for name, relation in visitable_relationships:
        if relation.backref:
            back_relationships.add(relation.backref)
        relationship_children = getattr(obj, name)
        if relationship_children is not None:
            if relation.uselist:
                children = []
                for child in [c for c in relationship_children if c not in visited_children]:
                    visited_children.add(child)
                    children.append(model_to_dict(child, visited_children, back_relationships))
                serialized_data[name] = children
            else:
                serialized_data[name] = model_to_dict(relationship_children, visited_children, back_relationships)
    return serialized_data
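A short usage sketch for model_to_dict, assuming hypothetical User/Address declarative models with a one-to-many "addresses" relationship and an active session:

# Hypothetical usage: columns become plain keys, relationships become nested
# dicts/lists; backrefs already visited are skipped to avoid infinite recursion.
user = session.query(User).first()
data = model_to_dict(user)
# e.g. {"id": 1, "name": "ed",
#       "addresses": [{"id": 10, "email": "ed@example.com", ...}, ...]}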
def _post_init(self): if self._should_log_info: self.logger.info(str(self) + " setup primary join %s" % self.primaryjoin) self.logger.info(str(self) + " setup secondary join %s" % self.secondaryjoin) self.logger.info(str(self) + " synchronize pairs [%s]" % ",".join("(%s => %s)" % (l, r) for l, r in self.synchronize_pairs)) self.logger.info(str(self) + " secondary synchronize pairs [%s]" % ",".join(("(%s => %s)" % (l, r) for l, r in self.secondary_synchronize_pairs or []))) self.logger.info(str(self) + " local/remote pairs [%s]" % ",".join("(%s / %s)" % (l, r) for l, r in self.local_remote_pairs)) self.logger.info(str(self) + " relation direction %s" % self.direction) if self.uselist is None and self.direction is MANYTOONE: self.uselist = False if self.uselist is None: self.uselist = True if not self.viewonly: self._dependency_processor = dependency.create_dependency_processor(self) # primary property handler, set up class attributes if self.is_primary(): if self.back_populates: self.extension = util.to_list(self.extension) or [] self.extension.append(attributes.GenericBackrefExtension(self.back_populates)) self._add_reverse_property(self.back_populates) if self.backref is not None: self.backref.compile(self) elif not mapper.class_mapper(self.parent.class_, compile=False)._get_property(self.key, raiseerr=False): raise sa_exc.ArgumentError("Attempting to assign a new relation '%s' to " "a non-primary mapper on class '%s'. New relations can only be " "added to the primary mapper, i.e. the very first " "mapper created for class '%s' " % (self.key, self.parent.class_.__name__, self.parent.class_.__name__)) super(RelationProperty, self).do_init()
def _get_root_node_class(self, objtype):
    "Get the root node class in the polymorphic inheritance hierarchy."
    # FIXME: Figure out what this root_node_class business is about.
    if self.root_node_class is None:
        mapper = class_mapper(objtype)
        while mapper.inherits is not None:
            mapper = mapper.inherits
        self.root_node_class = mapper.class_
    return self.root_node_class
def attribute_names(self):
    """
    Return a list of the column properties for this object.

    :rtype: list
    """
    return [
        prop.key for prop in class_mapper(self.__class__).iterate_properties
        if isinstance(prop, ColumnProperty)
    ]
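A rough usage sketch, assuming attribute_names() is provided by a hypothetical mixin (here called AttributeNamesMixin) on a declarative model:

from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class User(Base, AttributeNamesMixin):   # AttributeNamesMixin is a hypothetical name for the mixin above
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String)

User().attribute_names()   # -> ['id', 'name'] (column properties only, no relationships)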
def __init__(self, *args, **kw): super(SQLAlchemyMappedClassBase, self).__init__(*args, **kw) from sqlalchemy.orm.mapper import class_mapper self.mapper = class_mapper(self.obj) if self.mapper.local_table: self.table = self.mapper.local_table elif self.mapper.select_table: self.table = self.mapper.select_table else: raise LookupError("not sure how to get a table from mapper %s" % self.mapper)
def _determine_targets(self):
    if isinstance(self.argument, type):
        self.mapper = mapper.class_mapper(self.argument, entity_name=self.entity_name, compile=False)._check_compile()
    elif isinstance(self.argument, mapper.Mapper):
        self.mapper = self.argument._check_compile()
    else:
        raise exceptions.ArgumentError("relation '%s' expects a class or a mapper argument (received: %s)" % (self.key, type(self.argument)))

    # ensure the "select_mapper", if different from the regular target mapper, is compiled.
    self.mapper.get_select_mapper()._check_compile()

    if self.association is not None:
        if isinstance(self.association, type):
            self.association = mapper.class_mapper(self.association, entity_name=self.entity_name, compile=False)._check_compile()

    self.target = self.mapper.mapped_table
    self.select_mapper = self.mapper.get_select_mapper()
    self.select_table = self.mapper.select_table
    self.loads_polymorphic = self.target is not self.select_table

    if self.cascade.delete_orphan:
        if self.parent.class_ is self.mapper.class_:
            raise exceptions.ArgumentError("In relationship '%s', can't establish 'delete-orphan' cascade rule on a self-referential relationship. You probably want cascade='all', which includes delete cascading but not orphan detection." % (str(self)))
        self.mapper.primary_mapper().delete_orphans.append((self.key, self.parent.class_))
class InstrumentedAttribute(interfaces.PropComparator):
    """public-facing instrumented attribute."""

    def __init__(self, impl, comparator=None):
        """Construct an InstrumentedAttribute.

        comparator
          a sql.Comparator to which class-level compare/math events will be sent
        """
        self.impl = impl
        self.comparator = comparator

    def __set__(self, obj, value):
        self.impl.set(obj._state, value, None)

    def __delete__(self, obj):
        self.impl.delete(obj._state)

    def __get__(self, obj, owner):
        if obj is None:
            return self
        return self.impl.get(obj._state)

    def get_history(self, obj, **kwargs):
        return self.impl.get_history(obj._state, **kwargs)

    def clause_element(self):
        return self.comparator.clause_element()

    def expression_element(self):
        return self.comparator.expression_element()

    def operate(self, op, *other, **kwargs):
        return op(self.comparator, *other, **kwargs)

    def reverse_operate(self, op, other, **kwargs):
        return op(other, self.comparator, **kwargs)

    def hasparent(self, instance, optimistic=False):
        return self.impl.hasparent(instance._state, optimistic=optimistic)

    property = property(
        lambda s: class_mapper(s.impl.class_).get_property(s.impl.key),
        doc="the MapperProperty object associated with this attribute")
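A rough illustration of the operate()/reverse_operate() hooks above: a class-level comparison on a mapped attribute is delegated to the attribute's comparator and yields a SQL expression rather than a Python boolean (User is a hypothetical mapped class):

# User.name is an InstrumentedAttribute; "==" is routed through operate(),
# which forwards to the comparator and produces a ColumnElement such as
# "users.name = :name_1" instead of evaluating to True/False.
criterion = (User.name == 'ed')
print(str(criterion))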
def _post_init(self): if self._should_log_info: self.logger.info(str(self) + " setup primary join %s" % self.primaryjoin) self.logger.info(str(self) + " setup secondary join %s" % self.secondaryjoin) self.logger.info( str(self) + " synchronize pairs [%s]" % ",".join("(%s => %s)" % (l, r) for l, r in self.synchronize_pairs) ) self.logger.info( str(self) + " secondary synchronize pairs [%s]" % ",".join(("(%s => %s)" % (l, r) for l, r in self.secondary_synchronize_pairs or [])) ) self.logger.info( str(self) + " local/remote pairs [%s]" % ",".join("(%s / %s)" % (l, r) for l, r in self.local_remote_pairs) ) self.logger.info(str(self) + " relation direction %s" % self.direction) if self.uselist is None and self.direction is MANYTOONE: self.uselist = False if self.uselist is None: self.uselist = True if not self.viewonly: self._dependency_processor = dependency.create_dependency_processor(self) # primary property handler, set up class attributes if self.is_primary(): if self.backref is not None: self.backref.compile(self) elif not mapper.class_mapper(self.parent.class_, compile=False)._get_property(self.key, raiseerr=False): raise sa_exc.ArgumentError( "Attempting to assign a new relation '%s' to " "a non-primary mapper on class '%s'. New relations can only be " "added to the primary mapper, i.e. the very first " "mapper created for class '%s' " % (self.key, self.parent.class_.__name__, self.parent.class_.__name__) ) super(RelationProperty, self).do_init()
def hybrid_column(relation, class_=None, expression=None, class_kwargs=None, default=''):
    if isinstance(class_, type):
        class_ = mapper.class_mapper(class_, compile=False)
    if class_kwargs is None:
        class_kwargs = {}

    def decorate(func):
        name = func.__name__

        @wraps(func)
        def fget(self):
            obj = getattr(self, relation)
            if not obj:
                return default
            else:
                # read the attribute of the same name from the related object
                return getattr(obj, name)

        hybrid = hybrid_property(fget)

        if class_:
            def fset(self, value):
                obj = getattr(self, relation)
                if not obj:
                    # create and attach the related object on first assignment
                    obj = class_(**class_kwargs)
                    setattr(self, relation, obj)
                setattr(obj, name, value)
            hybrid = hybrid.setter(fset)

        if expression:
            def expr(cls):
                return expression
            hybrid = hybrid.expression(expr)

        return hybrid

    return decorate
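A usage sketch for the decorator factory above, assuming hypothetical Person/Profile models where Person.profile is a relationship to a Profile model that has a nickname column:

# Hypothetical usage: proxy Person.nickname to Person.profile.nickname;
# assigning creates the Profile if it does not exist yet.
class Person(Base):
    __tablename__ = "person"
    id = Column(Integer, primary_key=True)
    profile_id = Column(Integer, ForeignKey("profile.id"))
    profile = relationship("Profile")

    @hybrid_column('profile', class_=Profile, expression=Profile.nickname)
    def nickname(self):
        """Only the function name is used; the body is ignored by hybrid_column."""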
def is_assigned(obj):
    try:
        cm = class_mapper(obj)
    except sqlalchemy_exc.InvalidRequestError:
        return False
    return True
def instances(self, cursor, *mappers_or_columns, **kwargs): """Return a list of mapped instances corresponding to the rows in a given *cursor* (i.e. ``ResultProxy``). \*mappers_or_columns is an optional list containing one or more of classes, mappers, strings or sql.ColumnElements which will be applied to each row and added horizontally to the result set, which becomes a list of tuples. The first element in each tuple is the usual result based on the mapper represented by this ``Query``. Each additional element in the tuple corresponds to an entry in the \*mappers_or_columns list. For each element in \*mappers_or_columns, if the element is a mapper or mapped class, an additional class instance will be present in the tuple. If the element is a string or sql.ColumnElement, the corresponding result column from each row will be present in the tuple. Note that when \*mappers_or_columns is present, "uniquing" for the result set is *disabled*, so that the resulting tuples contain entities as they actually correspond. this indicates that multiple results may be present if this option is used. """ self.__log_debug("instances()") session = self.session context = SelectionContext(self.select_mapper, session, self.extension, with_options=self.with_options, **kwargs) process = [] mappers_or_columns = tuple(self._entities) + mappers_or_columns if mappers_or_columns: for m in mappers_or_columns: if isinstance(m, type): m = mapper.class_mapper(m) if isinstance(m, mapper.Mapper): def x(m): appender = [] def proc(context, row): if not m._instance(context, row, appender): appender.append(None) process.append((proc, appender)) x(m) elif isinstance(m, sql.ColumnElement) or isinstance(m, basestring): def y(m): res = [] def proc(context, row): res.append(row[m]) process.append((proc, res)) y(m) result = [] else: result = util.UniqueAppender([]) for row in cursor.fetchall(): self.select_mapper._instance(context, row, result) for proc in process: proc[0](context, row) # store new stuff in the identity map for value in context.identity_map.values(): session._register_persistent(value) if mappers_or_columns: return list(util.OrderedSet(zip(*([result] + [o[1] for o in process])))) else: return result.data
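A rough usage sketch for instances() with extra entities, using hypothetical User/Address mappings and tables from this old Query API; exact setup on a given SQLAlchemy version may differ:

# Execute a SELECT over the joined tables, then let instances() build
# (User, Address, email) tuples from each row of the ResultProxy.
result = engine.execute(users_table.join(addresses_table).select())
rows = session.query(User).instances(result, Address, addresses_table.c.email)
for user, address, email in rows:
    print(user, address, email)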
def is_assigned(obj):
    try:
        cm = class_mapper(obj)
    except sqlalchemy_exc.SQLAlchemyError:
        return False
    return True
def _determine_targets(self): if isinstance(self.argument, type): self.mapper = mapper.class_mapper(self.argument, compile=False) elif isinstance(self.argument, mapper.Mapper): self.mapper = self.argument elif util.callable(self.argument): # accept a callable to suit various deferred-configurational schemes self.mapper = mapper.class_mapper(self.argument(), compile=False) else: raise sa_exc.ArgumentError( "relation '%s' expects a class or a mapper argument (received: %s)" % (self.key, type(self.argument)) ) assert isinstance(self.mapper, mapper.Mapper), self.mapper # accept callables for other attributes which may require deferred initialization for attr in ("order_by", "primaryjoin", "secondaryjoin", "secondary", "_foreign_keys", "remote_side"): if util.callable(getattr(self, attr)): setattr(self, attr, getattr(self, attr)()) # in the case that InstrumentedAttributes were used to construct # primaryjoin or secondaryjoin, remove the "_orm_adapt" annotation so these # interact with Query in the same way as the original Table-bound Column objects for attr in ("primaryjoin", "secondaryjoin"): val = getattr(self, attr) if val is not None: util.assert_arg_type(val, sql.ClauseElement, attr) setattr(self, attr, _orm_deannotate(val)) if self.order_by: self.order_by = [expression._literal_as_column(x) for x in util.to_list(self.order_by)] self._foreign_keys = util.column_set( expression._literal_as_column(x) for x in util.to_column_set(self._foreign_keys) ) self.remote_side = util.column_set( expression._literal_as_column(x) for x in util.to_column_set(self.remote_side) ) if not self.parent.concrete: for inheriting in self.parent.iterate_to_root(): if inheriting is not self.parent and inheriting._get_property(self.key, raiseerr=False): util.warn( ( "Warning: relation '%s' on mapper '%s' supercedes " "the same relation on inherited mapper '%s'; this " "can cause dependency issues during flush" ) % (self.key, self.parent, inheriting) ) # TODO: remove 'self.table' self.target = self.table = self.mapper.mapped_table if self.cascade.delete_orphan: if self.parent.class_ is self.mapper.class_: raise sa_exc.ArgumentError( "In relationship '%s', can't establish 'delete-orphan' cascade " "rule on a self-referential relationship. " "You probably want cascade='all', which includes delete cascading but not orphan detection." % (str(self)) ) self.mapper.primary_mapper().delete_orphans.append((self.key, self.parent.class_))
def compile(self, whereclause = None, **kwargs): """Given a WHERE criterion, produce a ClauseElement-based statement suitable for usage in the execute() method. the arguments to this function are deprecated and are removed in version 0.4. """ if self._statement: self._statement.use_labels = True return self._statement if self._criterion: whereclause = sql.and_(self._criterion, whereclause) if whereclause is not None and self.is_polymorphic: # adapt the given WHERECLAUSE to adjust instances of this query's mapped # table to be that of our select_table, # which may be the "polymorphic" selectable used by our mapper. sql_util.ClauseAdapter(self.table).traverse(whereclause, stop_on=util.Set([self.table])) # if extra entities, adapt the criterion to those as well for m in self._entities: if isinstance(m, type): m = mapper.class_mapper(m) if isinstance(m, mapper.Mapper): table = m.select_table sql_util.ClauseAdapter(m.select_table).traverse(whereclause, stop_on=util.Set([m.select_table])) # get/create query context. get the ultimate compile arguments # from there context = kwargs.pop('query_context', None) if context is None: context = QueryContext(self, kwargs) order_by = context.order_by group_by = context.group_by from_obj = context.from_obj lockmode = context.lockmode distinct = context.distinct limit = context.limit offset = context.offset if order_by is False: order_by = self.mapper.order_by if order_by is False: if self.table.default_order_by() is not None: order_by = self.table.default_order_by() try: for_update = {'read':'read','update':True,'update_nowait':'nowait',None:False}[lockmode] except KeyError: raise exceptions.ArgumentError("Unknown lockmode '%s'" % lockmode) # if single-table inheritance mapper, add "typecol IN (polymorphic)" criterion so # that we only load the appropriate types if self.select_mapper.single and self.select_mapper.polymorphic_on is not None and self.select_mapper.polymorphic_identity is not None: whereclause = sql.and_(whereclause, self.select_mapper.polymorphic_on.in_(*[m.polymorphic_identity for m in self.select_mapper.polymorphic_iterator()])) alltables = [] for l in [sql_util.TableFinder(x) for x in from_obj]: alltables += l if self.table not in alltables: from_obj.append(self.table) if self._should_nest(context): # if theres an order by, add those columns to the column list # of the "rowcount" query we're going to make if order_by: order_by = util.to_list(order_by) or [] cf = sql_util.ColumnFinder() for o in order_by: cf.traverse(o) else: cf = [] s2 = sql.select(self.table.primary_key + list(cf), whereclause, use_labels=True, from_obj=from_obj, **context.select_args()) if order_by: s2.order_by(*util.to_list(order_by)) s3 = s2.alias('tbl_row_count') crit = s3.primary_key==self.table.primary_key statement = sql.select([], crit, use_labels=True, for_update=for_update) # now for the order by, convert the columns to their corresponding columns # in the "rowcount" query, and tack that new order by onto the "rowcount" query if order_by: statement.order_by(*sql_util.ClauseAdapter(s3).copy_and_process(order_by)) else: statement = sql.select([], whereclause, from_obj=from_obj, use_labels=True, for_update=for_update, **context.select_args()) if order_by: statement.order_by(*util.to_list(order_by)) # for a DISTINCT query, you need the columns explicitly specified in order # to use it in "order_by". ensure they are in the column criterion (particularly oid). 
# TODO: this should be done at the SQL level not the mapper level if kwargs.get('distinct', False) and order_by: [statement.append_column(c) for c in util.to_list(order_by)] context.statement = statement # give all the attached properties a chance to modify the query # TODO: doing this off the select_mapper. if its the polymorphic mapper, then # it has no relations() on it. should we compile those too into the query ? (i.e. eagerloads) for value in self.select_mapper.iterate_properties: value.setup(context) # additional entities/columns, add those to selection criterion for m in self._entities: if isinstance(m, type): m = mapper.class_mapper(m) if isinstance(m, mapper.Mapper): for value in m.iterate_properties: value.setup(context) elif isinstance(m, sql.ColumnElement): statement.append_column(m) return statement
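A rough usage sketch for this old-style compile() method, with hypothetical User/users_table names: it builds the SELECT statement for inspection without executing it.

q = session.query(User)
stmt = q.compile(users_table.c.name == 'ed')   # produce the generated Select
print(stmt)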
def list_tags(cls):
    return db.session.execute(
        'select distinct(kv.*) from tag,jsonb_each(tag.tags) as kv',
        mapper=class_mapper(cls))
def compile(self, whereclause = None, **kwargs): """Given a WHERE criterion, produce a ClauseElement-based statement suitable for usage in the execute() method. """ if self._criterion: whereclause = sql.and_(self._criterion, whereclause) if whereclause is not None and self.is_polymorphic: # adapt the given WHERECLAUSE to adjust instances of this query's mapped # table to be that of our select_table, # which may be the "polymorphic" selectable used by our mapper. sql_util.ClauseAdapter(self.table).traverse(whereclause) # if extra entities, adapt the criterion to those as well for m in self._entities: if isinstance(m, type): m = mapper.class_mapper(m) if isinstance(m, mapper.Mapper): table = m.select_table sql_util.ClauseAdapter(m.select_table).traverse(whereclause) # get/create query context. get the ultimate compile arguments # from there context = kwargs.pop('query_context', None) if context is None: context = QueryContext(self, kwargs) order_by = context.order_by group_by = context.group_by from_obj = context.from_obj lockmode = context.lockmode distinct = context.distinct limit = context.limit offset = context.offset if order_by is False: order_by = self.mapper.order_by if order_by is False: if self.table.default_order_by() is not None: order_by = self.table.default_order_by() try: for_update = {'read':'read','update':True,'update_nowait':'nowait',None:False}[lockmode] except KeyError: raise exceptions.ArgumentError("Unknown lockmode '%s'" % lockmode) # if single-table inheritance mapper, add "typecol IN (polymorphic)" criterion so # that we only load the appropriate types if self.select_mapper.single and self.select_mapper.polymorphic_on is not None and self.select_mapper.polymorphic_identity is not None: whereclause = sql.and_(whereclause, self.select_mapper.polymorphic_on.in_(*[m.polymorphic_identity for m in self.select_mapper.polymorphic_iterator()])) alltables = [] for l in [sql_util.TableFinder(x) for x in from_obj]: alltables += l if self.table not in alltables: from_obj.append(self.table) if self._should_nest(context): # if theres an order by, add those columns to the column list # of the "rowcount" query we're going to make if order_by: order_by = util.to_list(order_by) or [] cf = sql_util.ColumnFinder() for o in order_by: cf.traverse(o) else: cf = [] s2 = sql.select(self.table.primary_key + list(cf), whereclause, use_labels=True, from_obj=from_obj, **context.select_args()) if order_by: s2.order_by(*util.to_list(order_by)) s3 = s2.alias('tbl_row_count') crit = s3.primary_key==self.table.primary_key statement = sql.select([], crit, use_labels=True, for_update=for_update) # now for the order by, convert the columns to their corresponding columns # in the "rowcount" query, and tack that new order by onto the "rowcount" query if order_by: statement.order_by(*sql_util.ClauseAdapter(s3).copy_and_process(order_by)) else: statement = sql.select([], whereclause, from_obj=from_obj, use_labels=True, for_update=for_update, **context.select_args()) if order_by: statement.order_by(*util.to_list(order_by)) # for a DISTINCT query, you need the columns explicitly specified in order # to use it in "order_by". ensure they are in the column criterion (particularly oid). # TODO: this should be done at the SQL level not the mapper level if kwargs.get('distinct', False) and order_by: [statement.append_column(c) for c in util.to_list(order_by)] context.statement = statement # give all the attached properties a chance to modify the query # TODO: doing this off the select_mapper. 
if its the polymorphic mapper, then # it has no relations() on it. should we compile those too into the query ? (i.e. eagerloads) for value in self.select_mapper.props.values(): value.setup(context) # additional entities/columns, add those to selection criterion for m in self._entities: if isinstance(m, type): m = mapper.class_mapper(m) if isinstance(m, mapper.Mapper): for value in m.props.values(): value.setup(context) elif isinstance(m, sql.ColumnElement): statement.append_column(m) return statement
def _property(self):
    from sqlalchemy.orm.mapper import class_mapper
    return class_mapper(self.impl.class_).get_property(self.impl.key)
def instances(self, cursor, *mappers_or_columns, **kwargs): """Return a list of mapped instances corresponding to the rows in a given *cursor* (i.e. ``ResultProxy``). \*mappers_or_columns is an optional list containing one or more of classes, mappers, strings or sql.ColumnElements which will be applied to each row and added horizontally to the result set, which becomes a list of tuples. The first element in each tuple is the usual result based on the mapper represented by this ``Query``. Each additional element in the tuple corresponds to an entry in the \*mappers_or_columns list. For each element in \*mappers_or_columns, if the element is a mapper or mapped class, an additional class instance will be present in the tuple. If the element is a string or sql.ColumnElement, the corresponding result column from each row will be present in the tuple. Note that when \*mappers_or_columns is present, "uniquing" for the result set is *disabled*, so that the resulting tuples contain entities as they actually correspond. this indicates that multiple results may be present if this option is used. """ self.__log_debug("instances()") session = self.session context = SelectionContext(self.select_mapper, session, self.extension, with_options=self.with_options, **kwargs) process = [] mappers_or_columns = tuple(self._entities) + mappers_or_columns if mappers_or_columns: for m in mappers_or_columns: if isinstance(m, type): m = mapper.class_mapper(m) if isinstance(m, mapper.Mapper): appender = [] def proc(context, row): if not m._instance(context, row, appender): appender.append(None) process.append((proc, appender)) elif isinstance(m, sql.ColumnElement) or isinstance(m, basestring): res = [] def proc(context, row): res.append(row[m]) process.append((proc, res)) result = [] else: result = util.UniqueAppender([]) for row in cursor.fetchall(): self.select_mapper._instance(context, row, result) for proc in process: proc[0](context, row) # store new stuff in the identity map for value in context.identity_map.values(): session._register_persistent(value) if mappers_or_columns: return zip(*([result] + [o[1] for o in process])) else: return result.data
def get_tag(cls, resource_id, resource_type):
    return db.session.execute(
        "select t.* from tag, jsonb_each(tags) as t \
        where tag.resource_id = '%s' and tag.resource_type = '%s'" % (resource_id, resource_type),
        mapper=class_mapper(cls))
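An alternative sketch for the same query, assuming the same Flask-SQLAlchemy style db session: binding resource_id and resource_type as parameters instead of interpolating them into the SQL string avoids quoting and injection problems.

from sqlalchemy import text

def get_tag(cls, resource_id, resource_type):
    # same query as above, but with bound parameters
    stmt = text(
        "select t.* from tag, jsonb_each(tags) as t "
        "where tag.resource_id = :resource_id and tag.resource_type = :resource_type")
    return db.session.execute(
        stmt,
        {"resource_id": resource_id, "resource_type": resource_type},
        mapper=class_mapper(cls))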
def is_assigned(obj):
    try:
        cm = class_mapper(obj)
    except sqlalchemy.exceptions.SQLAlchemyError:
        return False
    return True
def _determine_targets(self): if isinstance(self.argument, type): self.mapper = mapper.class_mapper(self.argument, compile=False) elif isinstance(self.argument, mapper.Mapper): self.mapper = self.argument elif util.callable(self.argument): # accept a callable to suit various deferred-configurational schemes self.mapper = mapper.class_mapper(self.argument(), compile=False) else: raise sa_exc.ArgumentError( "relation '%s' expects a class or a mapper argument (received: %s)" % (self.key, type(self.argument))) assert isinstance(self.mapper, mapper.Mapper), self.mapper # accept callables for other attributes which may require deferred initialization for attr in ('order_by', 'primaryjoin', 'secondaryjoin', 'secondary', '_foreign_keys', 'remote_side'): if util.callable(getattr(self, attr)): setattr(self, attr, getattr(self, attr)()) # in the case that InstrumentedAttributes were used to construct # primaryjoin or secondaryjoin, remove the "_orm_adapt" annotation so these # interact with Query in the same way as the original Table-bound Column objects for attr in ('primaryjoin', 'secondaryjoin'): val = getattr(self, attr) if val is not None: util.assert_arg_type(val, sql.ClauseElement, attr) setattr(self, attr, _orm_deannotate(val)) if self.order_by: self.order_by = [ expression._literal_as_column(x) for x in util.to_list(self.order_by) ] self._foreign_keys = util.column_set( expression._literal_as_column(x) for x in util.to_column_set(self._foreign_keys)) self.remote_side = util.column_set( expression._literal_as_column(x) for x in util.to_column_set(self.remote_side)) if not self.parent.concrete: for inheriting in self.parent.iterate_to_root(): if inheriting is not self.parent and inheriting._get_property( self.key, raiseerr=False): util.warn( ("Warning: relation '%s' on mapper '%s' supercedes " "the same relation on inherited mapper '%s'; this " "can cause dependency issues during flush") % (self.key, self.parent, inheriting)) # TODO: remove 'self.table' self.target = self.table = self.mapper.mapped_table if self.cascade.delete_orphan: if self.parent.class_ is self.mapper.class_: raise sa_exc.ArgumentError( "In relationship '%s', can't establish 'delete-orphan' cascade " "rule on a self-referential relationship. " "You probably want cascade='all', which includes delete cascading but not orphan detection." % (str(self))) self.mapper.primary_mapper().delete_orphans.append( (self.key, self.parent.class_))