def relation(*args, **kwargs):
    """Provide a relationship of a primary Mapper to a secondary Mapper.

    This corresponds to a parent-child or associative table relationship.
    All positional and keyword arguments are forwarded to the loader
    factory; the return value is whatever ``_relation_loader`` produces.
    """
    # The legacy call form relation(class, table, **kwargs) passed a class
    # followed by a table; detect it by a leading class plus a second
    # positional argument and reject it with migration guidance.
    uses_deprecated_form = len(args) > 1 and isinstance(args[0], type)
    if uses_deprecated_form:
        raise exceptions.ArgumentError(
            "relation(class, table, **kwargs) is deprecated. Please use "
            "relation(class, **kwargs) or relation(mapper, **kwargs).")
    return _relation_loader(*args, **kwargs)
def init(self):
    """Finish configuring this eager loader after base-class setup.

    Rejects self-referential eager relationships, registers this
    property with the parent mapper's set of eager loaders, and
    initializes the per-mapper clause caches.
    """
    super(EagerLoader, self).init()
    # Eager loading joins the parent table to the child table in one
    # statement; that cannot work when the relationship points back at
    # the same mapper (or an ancestor of it), so refuse it up front.
    if self.parent.isa(self.select_mapper):
        message = (
            "Error creating eager relationship '%s' on parent class '%s' "
            "to child class '%s': Cant use eager loading on a self "
            "referential relationship." % (
                self.key, repr(self.parent.class_), repr(self.mapper.class_)))
        raise exceptions.ArgumentError(message)
    self.parent._eager_loaders.add(self.parent_property)
    # Caches of generated join clauses; the second is keyed by the
    # "lead" mapper driving a particular query.
    self.clauses = {}
    self.clauses_by_lead_mapper = {}
def __init__(self, arg=""):
    """Parse a comma-separated cascade specification into boolean flags.

    ``arg`` is a string such as ``"save-update, merge"``; recognized
    options are those in ``all_cascades``.  ``"all"`` turns on every
    cascade flag, and ``"delete-orphan"`` implies ``delete``.

    Raises ``exceptions.ArgumentError`` for an unrecognized option.
    """
    values = util.Set([c.strip() for c in arg.split(',')])
    self.delete_orphan = "delete-orphan" in values
    # "all" implies each individual cascade; delete-orphan implies delete.
    self.delete = "delete" in values or self.delete_orphan or "all" in values
    self.save_update = "save-update" in values or "all" in values
    self.merge = "merge" in values or "all" in values
    self.expunge = "expunge" in values or "all" in values
    # refresh_expire not really implemented as of yet
    #self.refresh_expire = "refresh-expire" in values or "all" in values
    for x in values:
        # BUGFIX: an empty arg (or stray commas) yields empty tokens after
        # splitting and stripping; those mean "no cascade" and must not be
        # reported as invalid options.
        if x and x not in all_cascades:
            raise exceptions.ArgumentError("Invalid cascade option '%s'" % x)
def _parse_rfc1738_args(name):
    """Parse an RFC-1738-style database URL string into a ``URL`` object.

    The expected shape is ``drivername://user:password@host:port/database?query``,
    where every component after the driver name is optional.  Raises
    ``exceptions.ArgumentError`` when the string cannot be parsed.
    """
    pattern = re.compile(r'''
            (\w+)://
            (?:
                ([^:/]*)
                (?::([^/]*))?
            @)?
            (?:
                ([^/:]*)
                (?::([^/]*))?
            )?
            (?:/(.*))?
            ''', re.X)
    m = pattern.match(name)
    if m is None:
        raise exceptions.ArgumentError(
            "Could not parse rfc1738 URL from string '%s'" % name)
    (name, username, password, host, port, database) = m.group(1, 2, 3, 4, 5, 6)
    query = None
    if database is not None:
        # Split the query string off the database name, if present.
        tokens = database.split(r"?", 2)
        database = tokens[0]
        if len(tokens) > 1:
            # An empty query string is normalized to None, not {}.
            query = dict(cgi.parse_qsl(tokens[1])) or None
    # The password may be percent/plus-encoded per RFC 1738.
    if password is not None:
        password = urllib.unquote_plus(password)
    return URL(name, username=username, password=password, host=host,
               port=port, database=database, query=query)
def compile_binary(binary):
    """assemble a SyncRule given a single binary condition

    NOTE(review): this is a closure -- ``self``, ``foreignkey`` and
    ``issecondary`` are read from the enclosing scope (not visible
    here); confirm their semantics against the enclosing method.
    Appends zero, one SyncRule to ``self.syncrules`` depending on
    whether a source/dest column pair can be determined.
    """
    # Only plain column == column comparisons can become sync rules;
    # anything else is silently skipped.
    if binary.operator != '=' or not isinstance(
            binary.left, schema.Column) or not isinstance(
            binary.right, schema.Column):
        return
    source_column = None
    dest_column = None
    if foreignkey is not None:
        # for self-referential relationships,
        # the best we can do right now is figure out which side
        # is the primary key
        # TODO: need some better way for this
        if binary.left.table == binary.right.table:
            # Primary-key side is treated as the source; fall back to
            # whichever side appears in the 'foreignkey' collection.
            if binary.left.primary_key:
                source_column = binary.left
                dest_column = binary.right
            elif binary.right.primary_key:
                source_column = binary.right
                dest_column = binary.left
            elif binary.left in foreignkey:
                dest_column = binary.left
                source_column = binary.right
            elif binary.right in foreignkey:
                dest_column = binary.right
                source_column = binary.left
            else:
                raise exceptions.ArgumentError(
                    "Can't figure out which column is source/dest in join clause '%s'" % str(binary))
        # for other relationships we are more flexible
        # and go off the 'foreignkey' property
        elif binary.left in foreignkey:
            dest_column = binary.left
            source_column = binary.right
        elif binary.right in foreignkey:
            dest_column = binary.right
            source_column = binary.left
        else:
            # Neither side is a designated foreign key; not our clause.
            return
    else:
        # No explicit 'foreignkey' given: infer direction from the
        # columns' own ForeignKey metadata.  The column that a foreign
        # key points at is the source; the referring column is the dest.
        if binary.left in [f.column for f in binary.right.foreign_keys]:
            dest_column = binary.right
            source_column = binary.left
        elif binary.right in [f.column for f in binary.left.foreign_keys]:
            dest_column = binary.left
            source_column = binary.right
    if source_column and dest_column:
        # Direction decides which mapper owns the source value and which
        # mapper's row receives it.
        if self.direction == ONETOMANY:
            self.syncrules.append(SyncRule(
                self.parent_mapper, source_column, dest_column,
                dest_mapper=self.child_mapper))
        elif self.direction == MANYTOONE:
            self.syncrules.append(SyncRule(
                self.child_mapper, source_column, dest_column,
                dest_mapper=self.parent_mapper))
        else:
            # presumably the many-to-many / association-table case;
            # 'issecondary' picks which side of the association this
            # clause belongs to -- TODO confirm against enclosing method.
            if not issecondary:
                self.syncrules.append(SyncRule(
                    self.parent_mapper, source_column, dest_column,
                    dest_mapper=self.child_mapper, issecondary=issecondary))
            else:
                self.syncrules.append(SyncRule(
                    self.child_mapper, source_column, dest_column,
                    dest_mapper=self.parent_mapper, issecondary=issecondary))
def compile(self, whereclause=None, **kwargs):
    """given a WHERE criterion, produce a ClauseElement-based statement suitable
    for usage in the execute() method.

    An existing QueryContext may be passed via the 'query_context'
    keyword; otherwise one is built from the remaining kwargs.  Returns
    the assembled select statement, which is also stored on
    ``context.statement`` before each mapped property gets a chance to
    modify the query via ``setup(context)``.
    """
    context = kwargs.pop('query_context', None)
    if context is None:
        context = QueryContext(self, kwargs)
    order_by = context.order_by
    from_obj = context.from_obj
    lockmode = context.lockmode
    distinct = context.distinct
    # limit/offset are read but not used directly below; presumably they
    # are applied via context.select_args() -- TODO confirm.
    limit = context.limit
    offset = context.offset
    # Fall back from context ordering to the query's ordering, then to
    # the table's default ordering.  'False' (not None) marks "unset".
    if order_by is False:
        order_by = self.order_by
    if order_by is False:
        if self.table.default_order_by() is not None:
            order_by = self.table.default_order_by()
    try:
        # Map the symbolic lockmode to the dialect-level FOR UPDATE flag.
        for_update = {'read': 'read', 'update': True, 'update_nowait': 'nowait', None: False}[lockmode]
    except KeyError:
        raise exceptions.ArgumentError("Unknown lockmode '%s'" % lockmode)
    # Single-table polymorphic mapping: restrict rows to the identities
    # handled by this mapper and its submappers.
    if self.mapper.single and self.mapper.polymorphic_on is not None and self.mapper.polymorphic_identity is not None:
        whereclause = sql.and_(whereclause, self.mapper.polymorphic_on.in_(*[m.polymorphic_identity for m in self.mapper.polymorphic_iterator()]))
    # Make sure the mapped table itself participates in the FROM list.
    alltables = []
    for l in [sql_util.TableFinder(x) for x in from_obj]:
        alltables += l
    if self.table not in alltables:
        from_obj.append(self.table)
    if self._should_nest(context):
        # Nested form: an inner "rowcount" select picks the primary keys
        # (honoring limit/offset), and the outer select joins back to it.
        # if theres an order by, add those columns to the column list
        # of the "rowcount" query we're going to make
        if order_by:
            order_by = util.to_list(order_by) or []
            cf = sql_util.ColumnFinder()
            [o.accept_visitor(cf) for o in order_by]
        else:
            cf = []
        s2 = sql.select(self.table.primary_key + list(cf), whereclause, use_labels=True, from_obj=from_obj, **context.select_args())
        if not distinct and order_by:
            s2.order_by(*util.to_list(order_by))
        s3 = s2.alias('tbl_row_count')
        crit = s3.primary_key == self.table.primary_key
        statement = sql.select([], crit, from_obj=[self.table], use_labels=True, for_update=for_update)
        # now for the order by, convert the columns to their corresponding columns
        # in the "rowcount" query, and tack that new order by onto the "rowcount" query
        if order_by:
            class Aliasizer(sql_util.Aliasizer):
                # every table reference is redirected at the inner alias
                def get_alias(self, table):
                    return s3
            order_by = [o.copy_container() for o in order_by]
            aliasizer = Aliasizer(*[t for t in sql_util.TableFinder(s3)])
            [o.accept_visitor(aliasizer) for o in order_by]
            statement.order_by(*util.to_list(order_by))
    else:
        statement = sql.select([], whereclause, from_obj=from_obj, use_labels=True, for_update=for_update, **context.select_args())
        if order_by:
            statement.order_by(*util.to_list(order_by))
        # for a DISTINCT query, you need the columns explicitly specified in order
        # to use it in "order_by". ensure they are in the column criterion (particularly oid).
        # TODO: this should be done at the SQL level not the mapper level
        # NOTE(review): this consults kwargs rather than the local
        # 'distinct' pulled from the context above; when a prebuilt
        # 'query_context' is passed in, the two can disagree -- confirm
        # which is intended.
        if kwargs.get('distinct', False) and order_by:
            [statement.append_column(c) for c in util.to_list(order_by)]
    context.statement = statement
    # give all the attached properties a chance to modify the query
    for value in self.mapper.props.values():
        value.setup(context)
    return statement