def construct_params(self, params=None):
    """Return a dictionary of bind parameter keys and values.

    :param params: optional mapping of caller-supplied parameter values;
      a supplied value overrides the bind parameter's own (possibly
      callable) default value.
    :return: dict keyed by the compiled parameter name.
    """
    pd = {}
    if params:
        params = util.column_dict(params)
        for bindparam, name in self.bind_names.iteritems():
            # a caller-supplied value may be keyed under any of the names
            # this parameter is known by; check in priority order
            for paramname in (bindparam.key, bindparam.shortname, name):
                if paramname in params:
                    pd[name] = params[paramname]
                    break
            else:
                # not overridden; fall back to the bind's own value,
                # invoking it if it is a callable default
                if util.callable(bindparam.value):
                    pd[name] = bindparam.value()
                else:
                    pd[name] = bindparam.value
    else:
        # no overrides at all: every parameter takes its default.
        # iterate items directly instead of re-looking up each name
        # (previously two dict lookups per parameter on this branch).
        for bindparam, name in self.bind_names.iteritems():
            if util.callable(bindparam.value):
                pd[name] = bindparam.value()
            else:
                pd[name] = bindparam.value
    return pd
def construct_params(self, params=None):
    """return a dictionary of bind parameter keys and values"""
    result = {}
    if params:
        given = util.column_dict(params)
        for bindparam, name in self.bind_names.iteritems():
            # a supplied value may be keyed by the bindparam object
            # itself or by any of its known names
            for lookup in (bindparam, bindparam.key,
                           bindparam.shortname, name):
                if lookup in given:
                    result[name] = given[lookup]
                    break
            else:
                # no override supplied; use the bind's own value
                if util.callable(bindparam.value):
                    result[name] = bindparam.value()
                else:
                    result[name] = bindparam.value
        return result
    else:
        for bindparam in self.bind_names:
            name = self.bind_names[bindparam]
            if util.callable(bindparam.value):
                result[name] = bindparam.value()
            else:
                result[name] = bindparam.value
        return result
def process_cursor_execute(self, statement, parameters, context,
                           executemany):
    """Match the executed statement against self.regex, with an
    optional partial (subset) comparison of parameters."""
    if not context:
        return
    received_stmt = _process_engine_statement(
        context.unicode_statement, context)
    received_params = context.compiled_parameters

    matched = bool(self.regex.match(received_stmt))

    if not self.params:
        expected = {}
    else:
        if util.callable(self.params):
            expected = self.params(context)
        else:
            expected = self.params
        if not isinstance(expected, list):
            expected = [expected]
        # positive compare only: every expected key/value must appear
        # in the corresponding received parameter set
        for expect, actual in zip(expected, received_params):
            for key, value in expect.iteritems():
                if key not in actual or actual[key] != value:
                    matched = False
                    break

    self._result = matched
    if not matched:
        self._errmsg = (
            "Testing for regex %r partial params %r, "
            "received %r with params %r" % (
                self.orig_regex, expected,
                received_stmt, received_params))
def process_cursor_execute(self, statement, parameters, context,
                           executemany):
    """Compare the executed statement and parameters for an exact match."""
    if not context:
        return
    received_stmt = _process_engine_statement(
        context.unicode_statement, context)
    received_params = context.compiled_parameters

    # TODO: remove this step once all unit tests
    # are migrated, as ExactSQL should really be *exact* SQL
    expected_sql = _process_assertion_statement(self.sql, context)

    is_match = received_stmt == expected_sql
    if self.params:
        if util.callable(self.params):
            expected_params = self.params(context)
        else:
            expected_params = self.params
        if not isinstance(expected_params, list):
            expected_params = [expected_params]
        is_match = is_match and \
            expected_params == context.compiled_parameters
    else:
        expected_params = {}

    self._result = is_match
    if not is_match:
        self._errmsg = (
            "Testing for exact statement %r exact params %r, "
            "received %r with params %r" % (
                expected_sql, expected_params,
                received_stmt, received_params))
def process_cursor_execute(self, statement, parameters, context,
                           executemany):
    """Regex match of the executed statement plus a positive-only
    (subset) comparison of parameter dictionaries."""
    if not context:
        return
    stmt = _process_engine_statement(context.unicode_statement, context)
    received = context.compiled_parameters

    # a Match object is always truthy, so this equals bool(match(...))
    ok = self.regex.match(stmt) is not None

    if self.params:
        if util.callable(self.params):
            wanted = self.params(context)
        else:
            wanted = self.params
        if not isinstance(wanted, list):
            wanted = [wanted]
        # do a positive compare only
        for want, got in zip(wanted, received):
            mismatch = False
            for k, v in want.iteritems():
                if k not in got or got[k] != v:
                    mismatch = True
                    break
            if mismatch:
                ok = False
    else:
        wanted = {}

    self._result = ok
    if not ok:
        self._errmsg = (
            "Testing for regex %r partial params %r, "
            "received %r with params %r"
            % (self.orig_regex, wanted, stmt, received))
def process_cursor_execute(self, statement, parameters, context,
                           executemany):
    """Compare the statement recompiled under the default dialect
    against self.statement, with positive-only param comparison."""
    if not context:
        return
    received_params = context.compiled_parameters

    # recompile from the context, using the default dialect
    recompiled = context.compiled.statement.compile(
        dialect=DefaultDialect(),
        column_keys=context.compiled.column_keys)
    received_stmt = re.sub(r'\n', '', str(recompiled))

    ok = self.statement == received_stmt
    if self.params:
        if util.callable(self.params):
            expected = self.params(context)
        else:
            expected = self.params
        if not isinstance(expected, list):
            expected = [expected]
        # do a positive compare only
        for expect, got in zip(expected, received_params):
            for k, v in expect.iteritems():
                if k not in got or got[k] != v:
                    ok = False
                    break
    else:
        expected = {}

    self._result = ok
    if not ok:
        self._errmsg = (
            "Testing for compiled statement %r partial params %r, "
            "received %r with params %r" % (
                self.statement, expected,
                received_stmt, received_params))
def as_predicate(cls, predicate, description=None):
    """Coerce any supported predicate specification into a Predicate."""
    if isinstance(predicate, compound):
        return cls.as_predicate(predicate.fails.union(predicate.skips))
    if isinstance(predicate, Predicate):
        if description and predicate.description is None:
            predicate.description = description
        return predicate
    if isinstance(predicate, (list, set)):
        members = [cls.as_predicate(item) for item in predicate]
        return OrPredicate(members, description)
    if isinstance(predicate, tuple):
        return SpecPredicate(*predicate)
    if isinstance(predicate, compat.string_types):
        # up to three whitespace-separated tokens: db [op [version]]
        tokens = predicate.split(" ", 2)
        db = tokens[0]
        if len(tokens) > 1:
            op = tokens[1]
        else:
            op = None
        if len(tokens) > 2:
            spec = tuple(int(part) for part in tokens[2].split("."))
        else:
            spec = None
        return SpecPredicate(db, op, spec, description=description)
    if util.callable(predicate):
        return LambdaPredicate(predicate, description)
    assert False, "unknown predicate type: %s" % predicate
def as_predicate(cls, predicate, description=None):
    """Normalize a predicate spec (Predicate, collection, tuple, string
    or callable) into a Predicate object."""
    if isinstance(predicate, compound):
        return cls.as_predicate(predicate.fails.union(predicate.skips))
    elif isinstance(predicate, Predicate):
        # attach the description only when none was set on the instance
        if description and predicate.description is None:
            predicate.description = description
        return predicate
    elif isinstance(predicate, (list, set)):
        sub_predicates = []
        for element in predicate:
            sub_predicates.append(cls.as_predicate(element))
        return OrPredicate(sub_predicates, description)
    elif isinstance(predicate, tuple):
        return SpecPredicate(*predicate)
    elif isinstance(predicate, compat.string_types):
        # string form: "<db>", "<db> <op>", or "<db> <op> <version>"
        remaining = predicate.split(" ", 2)
        db = remaining.pop(0)
        op = spec = None
        if remaining:
            op = remaining.pop(0)
        if remaining:
            version_text = remaining.pop(0)
            spec = tuple(int(piece) for piece in version_text.split("."))
        return SpecPredicate(db, op, spec, description=description)
    elif util.callable(predicate):
        return LambdaPredicate(predicate, description)
    else:
        assert False, "unknown predicate type: %s" % predicate
def process_cursor_execute(self, statement, parameters, context,
                           executemany):
    """Exact comparison of the received statement and parameters."""
    if not context:
        return
    observed_sql = _process_engine_statement(
        context.unicode_statement, context)
    observed_params = context.compiled_parameters

    # TODO: remove this step once all unit tests
    # are migrated, as ExactSQL should really be *exact* SQL
    wanted_sql = _process_assertion_statement(self.sql, context)

    matches = observed_sql == wanted_sql
    if not self.params:
        wanted_params = {}
    else:
        if util.callable(self.params):
            wanted_params = self.params(context)
        else:
            wanted_params = self.params
        if not isinstance(wanted_params, list):
            wanted_params = [wanted_params]
        if wanted_params != context.compiled_parameters:
            matches = False

    self._result = matches
    if not matches:
        self._errmsg = (
            "Testing for exact statement %r exact params %r, "
            "received %r with params %r" % (
                wanted_sql, wanted_params,
                observed_sql, observed_params))
def to_instance(typeobj, *arg, **kw):
    """Coerce ``typeobj`` into a type instance: None becomes NULLTYPE,
    a callable (i.e. a type class) is instantiated with the given
    arguments, anything else is assumed to already be an instance."""
    if typeobj is None:
        return NULLTYPE
    if not util.callable(typeobj):
        return typeobj
    return typeobj(*arg, **kw)
def visit_binary(self, binary, **kwargs):
    """Render a binary expression; the operator string may itself be a
    callable that produces the full rendering."""
    operator_repr = self.operator_string(binary.operator)
    left = self.process(binary.left)
    right = self.process(binary.right)
    if util.callable(operator_repr):
        return operator_repr(left, right, **binary.modifiers)
    return " ".join((left, operator_repr, right))
def visit_function(self, func, result_map=None, **kwargs):
    """Render a SQL function call, optionally recording it in
    ``result_map`` keyed by the lower-cased function name."""
    if result_map is not None:
        result_map[func.name.lower()] = (func.name, None, func.type)
    rendered = self.function_string(func)
    if util.callable(rendered):
        processed = [self.process(clause) for clause in func.clauses]
        return rendered(*processed)
    # rendered is a template expecting an %(expr)s substitution
    dotted = ".".join(func.packagenames + [rendered])
    return dotted % {'expr': self.function_argspec(func)}
def _get_target(self):
    """Resolve and memoize ``self.mapper`` from ``self.argument``,
    which may be a class, a Mapper, or a callable returning a class."""
    if not hasattr(self, 'mapper'):
        argument = self.argument
        if isinstance(argument, type):
            self.mapper = mapper.class_mapper(argument, compile=False)
        elif isinstance(argument, mapper.Mapper):
            self.mapper = argument
        elif util.callable(argument):
            # accept a callable to suit various
            # deferred-configurational schemes
            self.mapper = mapper.class_mapper(argument(), compile=False)
        else:
            raise sa_exc.ArgumentError(
                "relation '%s' expects a class or a mapper argument (received: %s)"
                % (self.key, type(argument)))
        assert isinstance(self.mapper, mapper.Mapper), self.mapper
    return self.mapper
def as_predicate(cls, predicate):
    """Coerce ``predicate`` (Predicate, list, tuple, string or
    callable) into a Predicate object."""
    if isinstance(predicate, Predicate):
        return predicate
    if isinstance(predicate, list):
        return OrPredicate([cls.as_predicate(item) for item in predicate])
    if isinstance(predicate, tuple):
        return SpecPredicate(*predicate)
    if isinstance(predicate, basestring):
        return SpecPredicate(predicate, None, None)
    if util.callable(predicate):
        return LambdaPredicate(predicate)
    assert False, "unknown predicate type: %s" % predicate
def _apply_all(self, methods): # must copy keys atomically for rec in self.proxy_refs.keys(): if rec is not None and rec.is_valid: try: for name in methods: if callable(name): name(rec) else: getattr(rec, name)() except (SystemExit, KeyboardInterrupt): raise except Exception, e: warnings.warn("testing_reaper couldn't close connection: %s" % e)
def _apply_all(self, methods): for rec in self.proxy_refs: if rec is not None and rec.is_valid: try: for name in methods: if callable(name): name(rec) else: getattr(rec, name)() except (SystemExit, KeyboardInterrupt): raise except Exception, e: # fixme sys.stderr.write("\n" + str(e) + "\n")
def _apply_all(self, methods): # must copy keys atomically for rec in self.proxy_refs.keys(): if rec is not None and rec.is_valid: try: for name in methods: if callable(name): name(rec) else: getattr(rec, name)() except (SystemExit, KeyboardInterrupt): raise except Exception, e: warnings.warn( "testing_reaper couldn't close connection: %s" % e)
def process_cursor_execute(self, statement, parameters, context,
                           executemany):
    """Compare the recompiled statement and completed parameter sets
    against the expected statement/params.

    Each expected param dict is completed with compile-time defaults,
    matched against (and removed from) the received sets; any leftover
    received sets also count as a mismatch.
    """
    if not context:
        return
    _received_parameters = list(context.compiled_parameters)

    # recompile from the context, using the default dialect
    compiled = \
        context.compiled.statement.compile(
            dialect=DefaultDialect(),
            column_keys=context.compiled.column_keys)
    _received_statement = re.sub(r'\n', '', str(compiled))
    equivalent = self.statement == _received_statement

    if self.params:
        if util.callable(self.params):
            params = self.params(context)
        else:
            params = self.params
        if not isinstance(params, list):
            params = [params]
        all_params = list(params)
        all_received = list(_received_parameters)
        while params:
            # complete each expected dict with compile-time defaults
            param = dict(params.pop(0))
            for k, v in context.compiled.params.iteritems():
                param.setdefault(k, v)
            if param not in _received_parameters:
                equivalent = False
                break
            else:
                _received_parameters.remove(param)
        if _received_parameters:
            # received parameter sets that were never expected
            equivalent = False
    else:
        params = {}
        all_params = {}
        all_received = []

    self._result = equivalent
    if not self._result:
        # stray debug `print` removed: it duplicated self._errmsg and
        # used Python-2-only statement syntax
        self._errmsg = (
            'Testing for compiled statement %r partial params %r, '
            'received %r with params %r' % (
                self.statement, all_params,
                _received_statement, all_received))
def process_cursor_execute(self, statement, parameters, context,
                           executemany):
    """Compare the recompiled statement and completed parameter sets
    against the expected statement/params.

    Each expected param dict is completed with compile-time defaults,
    matched against (and removed from) the received sets; any leftover
    received sets also count as a mismatch.
    """
    if not context:
        return
    _received_parameters = list(context.compiled_parameters)

    # recompile from the context, using the default dialect
    compiled = context.compiled.statement.\
        compile(dialect=DefaultDialect(),
                column_keys=context.compiled.column_keys)
    _received_statement = re.sub(r'\n', '', str(compiled))
    equivalent = self.statement == _received_statement

    if self.params:
        if util.callable(self.params):
            params = self.params(context)
        else:
            params = self.params
        if not isinstance(params, list):
            params = [params]
        all_params = list(params)
        all_received = list(_received_parameters)
        while params:
            # complete each expected dict with compile-time defaults
            param = dict(params.pop(0))
            for k, v in context.compiled.params.iteritems():
                param.setdefault(k, v)
            if param not in _received_parameters:
                equivalent = False
                break
            else:
                _received_parameters.remove(param)
        if _received_parameters:
            equivalent = False
    else:
        params = {}
        # BUG FIX: all_params/all_received were previously left
        # unassigned on this branch, raising NameError in the failure
        # message below whenever self.params was empty/falsy
        all_params = {}
        all_received = []

    self._result = equivalent
    if not self._result:
        self._errmsg = "Testing for compiled statement %r partial params %r, " \
            "received %r with params %r" % \
            (self.statement, all_params, _received_statement, all_received)
def _process_dependent_arguments(self):
    """Resolve and normalize the deferred/raw forms of this relation's
    configurational arguments, in place.

    Runs once during relation configuration; statement order matters:
    callables are resolved first, then join conditions are
    de-annotated, then column collections are coerced to sets.
    """
    # accept callables for other attributes which may require
    # deferred initialization
    for attr in ('order_by', 'primaryjoin', 'secondaryjoin',
                 'secondary', '_foreign_keys', 'remote_side'):
        if util.callable(getattr(self, attr)):
            setattr(self, attr, getattr(self, attr)())

    # in the case that InstrumentedAttributes were used to construct
    # primaryjoin or secondaryjoin, remove the "_orm_adapt" annotation
    # so these interact with Query in the same way as the original
    # Table-bound Column objects
    for attr in ('primaryjoin', 'secondaryjoin'):
        val = getattr(self, attr)
        if val is not None:
            util.assert_arg_type(val, sql.ClauseElement, attr)
            setattr(self, attr, _orm_deannotate(val))

    # coerce order_by / foreign keys / remote side into column
    # expressions; the latter two become column sets
    if self.order_by:
        self.order_by = [expression._literal_as_column(x)
                         for x in util.to_list(self.order_by)]
    self._foreign_keys = util.column_set(
        expression._literal_as_column(x)
        for x in util.to_column_set(self._foreign_keys))
    self.remote_side = util.column_set(
        expression._literal_as_column(x)
        for x in util.to_column_set(self.remote_side))

    # warn when a same-named relation on an inherited (non-concrete)
    # mapper is being superceded by this one
    if not self.parent.concrete:
        for inheriting in self.parent.iterate_to_root():
            if inheriting is not self.parent and \
                    inheriting._get_property(self.key, raiseerr=False):
                util.warn(
                    ("Warning: relation '%s' on mapper '%s' supercedes "
                     "the same relation on inherited mapper '%s'; this "
                     "can cause dependency issues during flush") %
                    (self.key, self.parent, inheriting))

    # TODO: remove 'self.table'
    self.target = self.table = self.mapper.mapped_table

    if self.cascade.delete_orphan:
        # delete-orphan cannot apply to a self-referential relation
        if self.parent.class_ is self.mapper.class_:
            raise sa_exc.ArgumentError(
                "In relationship '%s', can't establish 'delete-orphan' cascade "
                "rule on a self-referential relationship. "
                "You probably want cascade='all', which includes delete cascading but not orphan detection."
                % (str(self)))
        self.mapper.primary_mapper().delete_orphans.append(
            (self.key, self.parent.class_))
def __init__(self, func):
    """Wrap ``func`` as a column default source.

    :param func: a callable producing the default value.
    :raises ArgumentError: if ``func`` is not callable.
    """
    if not util.callable(func):
        raise exc.ArgumentError('func must be callable')
    self.func = func
    self.type = type_api.NULLTYPE
class OrderingList(list):
    """A custom list that manages position information for its children.

    See the module and __init__ documentation for more details.  The
    ``ordering_list`` factory function is used to configure
    ``OrderingList`` collections in ``mapper`` relationship definitions.

    """

    def __init__(self, ordering_attr=None, ordering_func=None,
                 reorder_on_append=False):
        """A custom list that manages position information for its children.

        ``OrderingList`` is a ``collection_class`` list implementation that
        syncs position in a Python list with a position attribute on the
        mapped objects.

        This implementation relies on the list starting in the proper order,
        so be **sure** to put an ``order_by`` on your relationship.

        :param ordering_attr:
          Name of the attribute that stores the object's order in the
          relationship.

        :param ordering_func: Optional.  A function that maps the position
          in the Python list to a value to store in the ``ordering_attr``.
          Values returned are usually (but need not be!) integers.

          An ``ordering_func`` is called with two positional parameters: the
          index of the element in the list, and the list itself.

          If omitted, Python list indexes are used for the attribute values.
          Two basic pre-built numbering functions are provided in this
          module: ``count_from_0`` and ``count_from_1``.  For more exotic
          examples like stepped numbering, alphabetical and Fibonacci
          numbering, see the unit tests.

        :param reorder_on_append:
          Default False.  When appending an object with an existing
          (non-None) ordering value, that value will be left untouched
          unless ``reorder_on_append`` is true.  This is an optimization
          to avoid a variety of dangerous unexpected database writes.

          SQLAlchemy will add instances to the list via append() when your
          object loads.  If for some reason the result set from the
          database skips a step in the ordering (say, row '1' is missing
          but you get '2', '3', and '4'), reorder_on_append=True would
          immediately renumber the items to '1', '2', '3'.  If you have
          multiple sessions making changes, any of whom happen to load this
          collection even in passing, all of the sessions would try to
          "clean up" the numbering in their commits, possibly causing all
          but one to fail with a concurrent modification error.  Spooky
          action at a distance.

          Recommend leaving this with the default of False, and just call
          ``reorder()`` if you're doing ``append()`` operations with
          previously ordered instances or when doing some housekeeping
          after manual sql operations.

        """
        self.ordering_attr = ordering_attr
        if ordering_func is None:
            ordering_func = count_from_0
        self.ordering_func = ordering_func
        self.reorder_on_append = reorder_on_append

    # More complex serialization schemes (multi column, e.g.) are possible
    # by subclassing and reimplementing these two methods.
    def _get_order_value(self, entity):
        # read the position attribute from the mapped object
        return getattr(entity, self.ordering_attr)

    def _set_order_value(self, entity, value):
        # write the position attribute on the mapped object
        setattr(entity, self.ordering_attr, value)

    def reorder(self):
        """Synchronize ordering for the entire collection.

        Sweeps through the list and ensures that each object has accurate
        ordering information set.

        """
        for index, entity in enumerate(self):
            self._order_entity(index, entity, True)

    # As of 0.5, _reorder is no longer semi-private
    _reorder = reorder

    def _order_entity(self, index, entity, reorder=True):
        """Set ``entity``'s order value for position ``index``; an
        existing non-None value is preserved unless ``reorder`` is true."""
        have = self._get_order_value(entity)

        # Don't disturb existing ordering if reorder is False
        if have is not None and not reorder:
            return

        should_be = self.ordering_func(index, self)
        if have != should_be:
            # write only on change, avoiding needless attribute events
            self._set_order_value(entity, should_be)

    def append(self, entity):
        super(OrderingList, self).append(entity)
        self._order_entity(len(self) - 1, entity, self.reorder_on_append)

    def _raw_append(self, entity):
        """Append without any ordering behavior."""
        super(OrderingList, self).append(entity)
    # register with the instrumentation machinery as a 1-item appender
    _raw_append = collection.adds(1)(_raw_append)

    def insert(self, index, entity):
        super(OrderingList, self).insert(index, entity)
        self._reorder()

    def remove(self, entity):
        super(OrderingList, self).remove(entity)
        self._reorder()

    def pop(self, index=-1):
        entity = super(OrderingList, self).pop(index)
        self._reorder()
        return entity

    def __setitem__(self, index, entity):
        if isinstance(index, slice):
            # normalize slice bounds, then assign element-by-element so
            # each assignment renumbers through the scalar branch below
            step = index.step or 1
            start = index.start or 0
            if start < 0:
                start += len(self)
            stop = index.stop or len(self)
            if stop < 0:
                stop += len(self)

            for i in xrange(start, stop, step):
                self.__setitem__(i, entity[i])
        else:
            self._order_entity(index, entity, True)
            super(OrderingList, self).__setitem__(index, entity)

    def __delitem__(self, index):
        super(OrderingList, self).__delitem__(index)
        self._reorder()

    # Py2K
    def __setslice__(self, start, end, values):
        super(OrderingList, self).__setslice__(start, end, values)
        self._reorder()

    def __delslice__(self, start, end):
        super(OrderingList, self).__delslice__(start, end)
        self._reorder()
    # end Py2K

    # copy list's docstrings onto same-named overrides that lack one
    for func_name, func in locals().items():
        if (util.callable(func) and func.func_name == func_name
                and not func.__doc__ and hasattr(list, func_name)):
            func.__doc__ = getattr(list, func_name).__doc__
    del func_name, func
class _AssociationSet(_AssociationCollection):
    """Generic, converting, set-to-set proxy.

    Wraps an underlying set of association objects (``self.col``) and
    exposes it as a set of proxied values, converting on the way in via
    ``self.creator`` and on the way out via ``self.getter``.
    """

    def _create(self, value):
        # build a new association object for a proxied value
        return self.creator(value)

    def _get(self, object):
        # extract the proxied value from an association object
        return self.getter(object)

    def _set(self, object, value):
        # store a proxied value onto an association object
        # NOTE(review): this 3-arg _set is shadowed by the 0-arg _set
        # defined further down in this class body — confirm whether the
        # 3-arg form is ever reachable
        return self.setter(object, value)

    def __len__(self):
        return len(self.col)

    def __nonzero__(self):
        # Py2 truth protocol; mirrors the truthiness of the backing set
        if self.col:
            return True
        else:
            return False

    def __contains__(self, value):
        # linear scan comparing proxied values, not association objects
        for member in self.col:
            # testlib.pragma exempt:__eq__
            if self._get(member) == value:
                return True
        return False

    def __iter__(self):
        """Iterate over proxied values.

        For the actual domain objects, iterate over .col instead or just
        use the underlying collection directly from its property on the
        parent.

        """
        for member in self.col:
            yield self._get(member)
        # NOTE(review): raising StopIteration inside a generator is a
        # RuntimeError on Python 3.7+ (PEP 479); harmless on Python 2
        raise StopIteration

    def add(self, value):
        # set semantics: only create an association for unseen values
        if value not in self:
            self.col.add(self._create(value))

    # for discard and remove, choosing a more expensive check strategy
    # rather than call self.creator()
    def discard(self, value):
        for member in self.col:
            if self._get(member) == value:
                self.col.discard(member)
                break

    def remove(self, value):
        for member in self.col:
            if self._get(member) == value:
                self.col.discard(member)
                return
        raise KeyError(value)

    def pop(self):
        if not self.col:
            raise KeyError('pop from an empty set')
        member = self.col.pop()
        return self._get(member)

    def update(self, other):
        for value in other:
            self.add(value)

    def __ior__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        for value in other:
            self.add(value)
        return self

    def _set(self):
        # snapshot of proxied values as a plain set
        # NOTE(review): shadows the 3-arg _set defined above
        return set(iter(self))

    def union(self, other):
        return set(self).union(other)

    __or__ = union

    def difference(self, other):
        return set(self).difference(other)

    __sub__ = difference

    def difference_update(self, other):
        for value in other:
            self.discard(value)

    def __isub__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        for value in other:
            self.discard(value)
        return self

    def intersection(self, other):
        return set(self).intersection(other)

    __and__ = intersection

    def intersection_update(self, other):
        # compute the target membership, then apply the minimal
        # removals/additions through the proxying add/remove
        want, have = self.intersection(other), set(self)
        remove, add = have - want, want - have
        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)

    def __iand__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        want, have = self.intersection(other), set(self)
        remove, add = have - want, want - have
        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)
        return self

    def symmetric_difference(self, other):
        return set(self).symmetric_difference(other)

    __xor__ = symmetric_difference

    def symmetric_difference_update(self, other):
        want, have = self.symmetric_difference(other), set(self)
        remove, add = have - want, want - have
        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)

    def __ixor__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        want, have = self.symmetric_difference(other), set(self)
        remove, add = have - want, want - have
        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)
        return self

    def issubset(self, other):
        return set(self).issubset(other)

    def issuperset(self, other):
        return set(self).issuperset(other)

    def clear(self):
        self.col.clear()

    def copy(self):
        return set(self)

    # comparisons operate on a plain-set snapshot of proxied values
    def __eq__(self, other):
        return set(self) == other

    def __ne__(self, other):
        return set(self) != other

    def __lt__(self, other):
        return set(self) < other

    def __le__(self, other):
        return set(self) <= other

    def __gt__(self, other):
        return set(self) > other

    def __ge__(self, other):
        return set(self) >= other

    def __repr__(self):
        return repr(set(self))

    def __hash__(self):
        raise TypeError("%s objects are unhashable" % type(self).__name__)

    # copy set's docstrings onto same-named overrides that lack one
    for func_name, func in locals().items():
        if (util.callable(func) and func.func_name == func_name
                and not func.__doc__ and hasattr(set, func_name)):
            func.__doc__ = getattr(set, func_name).__doc__
    del func_name, func
class _AssociationDict(_AssociationCollection):
    """Generic, converting, dict-to-dict proxy.

    Wraps an underlying dict of association objects (``self.col``) and
    exposes it as a dict of proxied values, converting via
    ``self.creator`` / ``self.getter`` / ``self.setter``.
    """

    def _create(self, key, value):
        # build a new association object for a key/value pair
        return self.creator(key, value)

    def _get(self, object):
        # extract the proxied value from an association object
        return self.getter(object)

    def _set(self, object, key, value):
        # store a proxied value onto an association object
        return self.setter(object, key, value)

    def __getitem__(self, key):
        return self._get(self.col[key])

    def __setitem__(self, key, value):
        # mutate an existing association in place, else create one
        if key in self.col:
            self._set(self.col[key], key, value)
        else:
            self.col[key] = self._create(key, value)

    def __delitem__(self, key):
        del self.col[key]

    def __contains__(self, key):
        # testlib.pragma exempt:__hash__
        return key in self.col

    def has_key(self, key):
        # testlib.pragma exempt:__hash__
        return key in self.col

    def __iter__(self):
        return self.col.iterkeys()

    def clear(self):
        self.col.clear()

    # comparisons operate on a plain-dict snapshot of proxied values
    def __eq__(self, other):
        return dict(self) == other

    def __ne__(self, other):
        return dict(self) != other

    def __lt__(self, other):
        return dict(self) < other

    def __le__(self, other):
        return dict(self) <= other

    def __gt__(self, other):
        return dict(self) > other

    def __ge__(self, other):
        return dict(self) >= other

    def __cmp__(self, other):
        # Py2-only rich-comparison fallback
        return cmp(dict(self), other)

    def __repr__(self):
        return repr(dict(self.items()))

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def setdefault(self, key, default=None):
        if key not in self.col:
            self.col[key] = self._create(key, default)
            return default
        else:
            return self[key]

    def keys(self):
        return self.col.keys()

    def iterkeys(self):
        return self.col.iterkeys()

    def values(self):
        return [self._get(member) for member in self.col.values()]

    def itervalues(self):
        for key in self.col:
            yield self._get(self.col[key])
        # NOTE(review): raising StopIteration inside a generator is a
        # RuntimeError on Python 3.7+ (PEP 479); harmless on Python 2
        raise StopIteration

    def items(self):
        return [(k, self._get(self.col[k])) for k in self]

    def iteritems(self):
        for key in self.col:
            yield (key, self._get(self.col[key]))
        # NOTE(review): see PEP 479 note on itervalues above
        raise StopIteration

    def pop(self, key, default=_NotProvided):
        if default is _NotProvided:
            member = self.col.pop(key)
        else:
            member = self.col.pop(key, default)
        # NOTE(review): when the default is returned by col.pop, it is
        # still passed through self._get — confirm defaults are expected
        # to be association objects, not plain values
        return self._get(member)

    def popitem(self):
        item = self.col.popitem()
        return (item[0], self._get(item[1]))

    def update(self, *a, **kw):
        if len(a) > 1:
            raise TypeError('update expected at most 1 arguments, got %i' %
                            len(a))
        elif len(a) == 1:
            seq_or_map = a[0]
            # discern dict from sequence - took the advice
            # from http://www.voidspace.org.uk/python/articles/duck_typing.shtml
            # still not perfect :(
            if hasattr(seq_or_map, 'keys'):
                for item in seq_or_map:
                    self[item] = seq_or_map[item]
            else:
                try:
                    for k, v in seq_or_map:
                        self[k] = v
                except ValueError:
                    raise ValueError(
                        "dictionary update sequence "
                        "requires 2-element tuples")

        # NOTE(review): iterating kw yields key strings only; this
        # unpack raises for non-empty **kw — likely meant kw.items()
        for key, value in kw:
            self[key] = value

    def copy(self):
        return dict(self.items())

    def __hash__(self):
        raise TypeError("%s objects are unhashable" % type(self).__name__)

    # copy dict's docstrings onto same-named overrides that lack one
    for func_name, func in locals().items():
        if (util.callable(func) and func.func_name == func_name
                and not func.__doc__ and hasattr(dict, func_name)):
            func.__doc__ = getattr(dict, func_name).__doc__
    del func_name, func
class _AssociationList(_AssociationCollection):
    """Generic, converting, list-to-list proxy.

    Wraps an underlying list of association objects (``self.col``) and
    exposes it as a list of proxied values, converting via
    ``self.creator`` / ``self.getter`` / ``self.setter``.
    """

    def _create(self, value):
        # build a new association object for a proxied value
        return self.creator(value)

    def _get(self, object):
        # extract the proxied value from an association object
        return self.getter(object)

    def _set(self, object, value):
        # store a proxied value onto an association object
        return self.setter(object, value)

    def __getitem__(self, index):
        return self._get(self.col[index])

    def __setitem__(self, index, value):
        if not isinstance(index, slice):
            self._set(self.col[index], value)
        else:
            # normalize the slice bounds against the current length
            if index.stop is None:
                stop = len(self)
            elif index.stop < 0:
                stop = len(self) + index.stop
            else:
                stop = index.stop
            step = index.step or 1

            rng = range(index.start or 0, stop, step)
            if step == 1:
                # contiguous slice: delete the old run, insert the new
                # NOTE(review): `del self[index.start]` fails when
                # index.start is None (rng falls back to 0 but this
                # does not) — confirm callers always pass a start
                for i in rng:
                    del self[index.start]
                i = index.start
                for item in value:
                    self.insert(i, item)
                    i += 1
            else:
                # extended slice: lengths must agree, assign in place
                if len(value) != len(rng):
                    raise ValueError(
                        "attempt to assign sequence of size %s to "
                        "extended slice of size %s" % (len(value),
                                                       len(rng)))
                for i, item in zip(rng, value):
                    self._set(self.col[i], item)

    def __delitem__(self, index):
        del self.col[index]

    def __contains__(self, value):
        # linear scan comparing proxied values, not association objects
        for member in self.col:
            # testlib.pragma exempt:__eq__
            if self._get(member) == value:
                return True
        return False

    # Py2-only slicing protocol
    def __getslice__(self, start, end):
        return [self._get(member) for member in self.col[start:end]]

    def __setslice__(self, start, end, values):
        members = [self._create(v) for v in values]
        self.col[start:end] = members

    def __delslice__(self, start, end):
        del self.col[start:end]

    def __iter__(self):
        """Iterate over proxied values.

        For the actual domain objects, iterate over .col instead or
        just use the underlying collection directly from its property
        on the parent.

        """
        for member in self.col:
            yield self._get(member)
        # NOTE(review): raising StopIteration inside a generator is a
        # RuntimeError on Python 3.7+ (PEP 479); harmless on Python 2
        raise StopIteration

    def append(self, value):
        item = self._create(value)
        self.col.append(item)

    def count(self, value):
        return sum([1 for _ in
                    itertools.ifilter(lambda v: v == value, iter(self))])

    def extend(self, values):
        for v in values:
            self.append(v)

    def insert(self, index, value):
        self.col[index:index] = [self._create(value)]

    def pop(self, index=-1):
        # NOTE(review): uses self.getter directly where sibling methods
        # go through self._get — same effect, but inconsistent
        return self.getter(self.col.pop(index))

    def remove(self, value):
        for i, val in enumerate(self):
            if val == value:
                del self.col[i]
                return
        raise ValueError("value not in list")

    def reverse(self):
        """Not supported, use reversed(mylist)"""
        raise NotImplementedError

    def sort(self):
        """Not supported, use sorted(mylist)"""
        raise NotImplementedError

    def clear(self):
        del self.col[0:len(self.col)]

    # comparisons operate on a plain-list snapshot of proxied values
    def __eq__(self, other):
        return list(self) == other

    def __ne__(self, other):
        return list(self) != other

    def __lt__(self, other):
        return list(self) < other

    def __le__(self, other):
        return list(self) <= other

    def __gt__(self, other):
        return list(self) > other

    def __ge__(self, other):
        return list(self) >= other

    def __cmp__(self, other):
        # Py2-only rich-comparison fallback
        return cmp(list(self), other)

    def __add__(self, iterable):
        try:
            other = list(iterable)
        except TypeError:
            return NotImplemented
        return list(self) + other

    def __radd__(self, iterable):
        try:
            other = list(iterable)
        except TypeError:
            return NotImplemented
        return other + list(self)

    def __mul__(self, n):
        if not isinstance(n, int):
            return NotImplemented
        return list(self) * n

    __rmul__ = __mul__

    def __iadd__(self, iterable):
        self.extend(iterable)
        return self

    def __imul__(self, n):
        # unlike a regular list *=, proxied __imul__ will generate unique
        # backing objects for each copy.  *= on proxied lists is a bit of
        # a stretch anyhow, and this interpretation of the __imul__
        # contract is more plausibly useful than copying the backing
        # objects.
        if not isinstance(n, int):
            return NotImplemented
        if n == 0:
            self.clear()
        elif n > 1:
            self.extend(list(self) * (n - 1))
        return self

    def copy(self):
        return list(self)

    def __repr__(self):
        return repr(list(self))

    def __hash__(self):
        raise TypeError("%s objects are unhashable" % type(self).__name__)

    # copy list's docstrings onto same-named overrides that lack one
    for func_name, func in locals().items():
        if (util.callable(func) and func.func_name == func_name
                and not func.__doc__ and hasattr(list, func_name)):
            func.__doc__ = getattr(list, func_name).__doc__
    del func_name, func
class _AssociationDict(_AssociationCollection):
    """Generic, converting, dict-to-dict proxy.

    Wraps a dict-based backing collection, converting between proxied
    values and backing objects with the creator/getter/setter supplied
    by the owning association proxy.
    """

    def _create(self, key, value):
        # Build a new backing object for ``key``/``value``.
        return self.creator(key, value)

    def _get(self, object):
        # Extract the proxied value from a backing object.
        return self.getter(object)

    def _set(self, object, key, value):
        # Store ``value`` on an existing backing object.
        return self.setter(object, key, value)

    def __getitem__(self, key):
        return self._get(self.col[key])

    def __setitem__(self, key, value):
        if key in self.col:
            self._set(self.col[key], key, value)
        else:
            self.col[key] = self._create(key, value)

    def __delitem__(self, key):
        del self.col[key]

    def __contains__(self, key):
        # testlib.pragma exempt:__hash__
        return key in self.col

    def has_key(self, key):
        # testlib.pragma exempt:__hash__
        return key in self.col

    def __iter__(self):
        return iter(self.col.keys())

    def clear(self):
        self.col.clear()

    # Comparisons are delegated to a materialized plain dict of the
    # proxied contents.
    def __eq__(self, other):
        return dict(self) == other

    def __ne__(self, other):
        return dict(self) != other

    def __lt__(self, other):
        return dict(self) < other

    def __le__(self, other):
        return dict(self) <= other

    def __gt__(self, other):
        return dict(self) > other

    def __ge__(self, other):
        return dict(self) >= other

    def __cmp__(self, other):
        # Python 2 three-way comparison support.
        return cmp(dict(self), other)

    def __repr__(self):
        return repr(dict(list(self.items())))

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def setdefault(self, key, default=None):
        if key not in self.col:
            self.col[key] = self._create(key, default)
            return default
        else:
            return self[key]

    def keys(self):
        return list(self.col.keys())

    def iterkeys(self):
        return iter(self.col.keys())

    def values(self):
        return [self._get(member) for member in list(self.col.values())]

    def itervalues(self):
        for key in self.col:
            yield self._get(self.col[key])
        # legacy Python 2 generator-termination idiom
        raise StopIteration

    def items(self):
        return [(k, self._get(self.col[k])) for k in self]

    def iteritems(self):
        for key in self.col:
            yield (key, self._get(self.col[key]))
        # legacy Python 2 generator-termination idiom
        raise StopIteration

    def pop(self, key, default=_NotProvided):
        # _NotProvided sentinel distinguishes "no default" from default=None.
        if default is _NotProvided:
            member = self.col.pop(key)
        else:
            member = self.col.pop(key, default)
        return self._get(member)

    def popitem(self):
        item = self.col.popitem()
        return (item[0], self._get(item[1]))

    def update(self, *a, **kw):
        if len(a) > 1:
            raise TypeError('update expected at most 1 arguments, got %i' %
                            len(a))
        elif len(a) == 1:
            seq_or_map = a[0]
            for item in seq_or_map:
                if isinstance(item, tuple):
                    self[item[0]] = item[1]
                else:
                    self[item] = seq_or_map[item]

        # Fixed: the original ``for key, value in kw:`` iterated the dict's
        # keys only, mis-unpacking each key string (ValueError for any key
        # whose length != 2).  items() yields the intended (key, value) pairs.
        for key, value in kw.items():
            self[key] = value

    def copy(self):
        return dict(list(self.items()))

    def __hash__(self):
        raise TypeError("%s objects are unhashable" % type(self).__name__)

    # Borrow dict's docstrings for undocumented proxy methods; executes in
    # the class namespace at class-definition time.
    for func_name, func in list(locals().items()):
        if (util.callable(func) and func.__name__ == func_name
            and not func.__doc__ and hasattr(dict, func_name)):
            func.__doc__ = getattr(dict, func_name).__doc__
    del func_name, func
def comparator(self):
    """Return the resolved comparator for this attribute.

    A callable comparator factory is invoked once and its result cached
    back onto ``self._comparator``; if an adapter is present, the cached
    comparator is replaced by its adapted form before being returned.
    """
    resolved = self._comparator
    if util.callable(resolved):
        resolved = resolved()
        self._comparator = resolved
    if self.adapter:
        resolved = resolved.adapted(self.adapter)
        self._comparator = resolved
    return resolved
def _instrument_class(cls):
    """Modify methods in a class and install instrumentation.

    Scans ``cls`` for role declarations and decorator-queued event
    requests, wraps mutator methods with event-firing instrumentation,
    and records the appender/remover/iterator roles as ``_sa_<role>``
    attributes.  Raises sa_exc.ArgumentError when ``cls`` cannot serve
    as a collection class (built-in type, or a required role missing).
    """
    # TODO: more formally document this as a decoratorless/Python 2.3
    # option for specifying instrumentation.  (likely doc'd here in code
    # only, not in online docs.)  Useful for C types too.
    #
    # __instrumentation__ = {
    #    'rolename': 'methodname',
    #    ...
    #    'methods': {
    #      'methodname': ('fire_{append,remove}_event', argspec,
    #                     'fire_{append,remove}_event'),
    #      'append': ('fire_append_event', 1, None),
    #      '__setitem__': ('fire_append_event', 1, 'fire_remove_event'),
    #      'pop': (None, None, 'fire_remove_event'),
    #      }
    #    }

    # In the normal call flow, a request for any of the 3 basic collection
    # types is transformed into one of our trivial subclasses
    # (e.g. InstrumentedList).  Catch anything else that sneaks in here...
    if cls.__module__ == '__builtin__':
        raise sa_exc.ArgumentError(
            "Can not instrument a built-in type. Use a "
            "subclass, even a trivial one.")

    # Start from the default role/decorator maps for the duck-typed
    # collection kind, if recognized.
    collection_type = util.duck_type_collection(cls)
    if collection_type in __interfaces:
        roles = __interfaces[collection_type].copy()
        decorators = roles.pop('_decorators', {})
    else:
        roles, decorators = {}, {}

    # Class-level declarations override the defaults (deep-copied so the
    # shared declaration dict is never mutated).
    if hasattr(cls, '__instrumentation__'):
        roles.update(copy.deepcopy(getattr(cls, '__instrumentation__')))
    methods = roles.pop('methods', {})

    for name in dir(cls):
        method = getattr(cls, name, None)
        if not util.callable(method):
            continue

        # note role declarations
        if hasattr(method, '_sa_instrument_role'):
            role = method._sa_instrument_role
            assert role in ('appender', 'remover', 'iterator',
                            'on_link', 'converter')
            roles[role] = name

        # transfer instrumentation requests from decorated function
        # to the combined queue
        before, after = None, None
        if hasattr(method, '_sa_instrument_before'):
            op, argument = method._sa_instrument_before
            assert op in ('fire_append_event', 'fire_remove_event')
            before = op, argument
        if hasattr(method, '_sa_instrument_after'):
            op = method._sa_instrument_after
            assert op in ('fire_append_event', 'fire_remove_event')
            after = op
        if before:
            methods[name] = before[0], before[1], after
        elif after:
            methods[name] = None, None, after

    # apply ABC auto-decoration to methods that need it
    # (skip anything explicitly queued or already instrumented)
    for method, decorator in decorators.items():
        fn = getattr(cls, method, None)
        if (fn and method not in methods and
            not hasattr(fn, '_sa_instrumented')):
            setattr(cls, method, decorator(fn))

    # ensure all roles are present, and apply implicit instrumentation if
    # needed
    if 'appender' not in roles or not hasattr(cls, roles['appender']):
        raise sa_exc.ArgumentError(
            "Type %s must elect an appender method to be "
            "a collection class" % cls.__name__)
    elif (roles['appender'] not in methods and
          not hasattr(getattr(cls, roles['appender']), '_sa_instrumented')):
        methods[roles['appender']] = ('fire_append_event', 1, None)

    if 'remover' not in roles or not hasattr(cls, roles['remover']):
        raise sa_exc.ArgumentError(
            "Type %s must elect a remover method to be "
            "a collection class" % cls.__name__)
    elif (roles['remover'] not in methods and
          not hasattr(getattr(cls, roles['remover']), '_sa_instrumented')):
        methods[roles['remover']] = ('fire_remove_event', 1, None)

    if 'iterator' not in roles or not hasattr(cls, roles['iterator']):
        raise sa_exc.ArgumentError(
            "Type %s must elect an iterator method to be "
            "a collection class" % cls.__name__)

    # apply ad-hoc instrumentation from decorators, class-level defaults
    # and implicit role declarations
    for method, (before, argument, after) in methods.items():
        setattr(cls, method,
                _instrument_membership_mutator(getattr(cls, method),
                                               before, argument, after))
    # intern the role map
    for role, method in roles.items():
        setattr(cls, '_sa_%s' % role, getattr(cls, method))

    # Marker consulted elsewhere to detect already-instrumented classes.
    setattr(cls, '_sa_instrumented', id(cls))
def visit_binary(self, binary, **kwargs):
    """Render a binary expression.

    The operator may map either to a callable (invoked with the compiled
    operands and the expression's modifiers) or to a plain string spliced
    between the compiled operands.
    """
    rendered_op = self.operator_string(binary.operator)
    lhs = self.process(binary.left)
    rhs = self.process(binary.right)
    if util.callable(rendered_op):
        return rendered_op(lhs, rhs, **binary.modifiers)
    return " ".join((lhs, rendered_op, rhs))
def _determine_targets(self):
    """Resolve ``self.argument`` into a target Mapper and normalize the
    deferred/callable relation attributes.

    Also validates join conditions, normalizes ordering and column-set
    attributes, warns about superceded inherited relations, and wires up
    delete-orphan cascade bookkeeping.
    """
    # ``argument`` may be a class, a Mapper, or a callable producing a class.
    if isinstance(self.argument, type):
        self.mapper = mapper.class_mapper(self.argument, compile=False)
    elif isinstance(self.argument, mapper.Mapper):
        self.mapper = self.argument
    elif util.callable(self.argument):
        # accept a callable to suit various deferred-configurational schemes
        self.mapper = mapper.class_mapper(self.argument(), compile=False)
    else:
        raise sa_exc.ArgumentError(
            "relation '%s' expects a class or a mapper argument (received: %s)"
            % (self.key, type(self.argument)))
    assert isinstance(self.mapper, mapper.Mapper), self.mapper

    # accept callables for other attributes which may require deferred
    # initialization
    for attr in ("order_by", "primaryjoin", "secondaryjoin", "secondary",
                 "_foreign_keys", "remote_side"):
        if util.callable(getattr(self, attr)):
            setattr(self, attr, getattr(self, attr)())

    # in the case that InstrumentedAttributes were used to construct
    # primaryjoin or secondaryjoin, remove the "_orm_adapt" annotation so
    # these interact with Query in the same way as the original Table-bound
    # Column objects
    for attr in ("primaryjoin", "secondaryjoin"):
        val = getattr(self, attr)
        if val is not None:
            util.assert_arg_type(val, sql.ClauseElement, attr)
            setattr(self, attr, _orm_deannotate(val))

    # Coerce ordering and column-set attributes into canonical column form.
    if self.order_by:
        self.order_by = [expression._literal_as_column(x)
                         for x in util.to_list(self.order_by)]

    self._foreign_keys = util.column_set(
        expression._literal_as_column(x)
        for x in util.to_column_set(self._foreign_keys))

    self.remote_side = util.column_set(
        expression._literal_as_column(x)
        for x in util.to_column_set(self.remote_side))

    # Warn when this relation shadows one defined on an inherited
    # (non-concrete) mapper, which can break flush-time dependency sorting.
    if not self.parent.concrete:
        for inheriting in self.parent.iterate_to_root():
            if inheriting is not self.parent \
                    and inheriting._get_property(self.key, raiseerr=False):
                util.warn(
                    ("Warning: relation '%s' on mapper '%s' supercedes "
                     "the same relation on inherited mapper '%s'; this "
                     "can cause dependency issues during flush")
                    % (self.key, self.parent, inheriting))

    # TODO: remove 'self.table'
    self.target = self.table = self.mapper.mapped_table

    if self.cascade.delete_orphan:
        # delete-orphan on a self-referential relation cannot distinguish
        # an orphan from a child row, so it is rejected outright.
        if self.parent.class_ is self.mapper.class_:
            raise sa_exc.ArgumentError(
                "In relationship '%s', can't establish 'delete-orphan' cascade "
                "rule on a self-referential relationship. "
                "You probably want cascade='all', which includes delete cascading but not orphan detection."
                % (str(self)))
        self.mapper.primary_mapper().delete_orphans.append(
            (self.key, self.parent.class_))
class _AssociationDict(object):
    """Generic, converting, dict-to-dict proxy."""

    def __init__(self, lazy_collection, creator, getter, setter):
        """Constructs an _AssociationDict.

        lazy_collection
          A callable returning a dict-based collection of entities (usually
          an object attribute managed by a SQLAlchemy relation())

        creator
          A function that creates new target entities.  Given two
          parameters: key and value.  The assertion is assumed::

            obj = creator(somekey, somevalue)
            assert getter(somekey) == somevalue

        getter
          A function.  Given an associated object and a key, return the
          'value'.

        setter
          A function.  Given an associated object, a key and a value, store
          that value on the object.

        """
        self.lazy_collection = lazy_collection
        self.creator = creator
        self.getter = getter
        self.setter = setter

    # Backing dict is re-fetched through the lazy collection on every access.
    col = property(lambda self: self.lazy_collection())

    def _create(self, key, value):
        return self.creator(key, value)

    def _get(self, object):
        return self.getter(object)

    def _set(self, object, key, value):
        return self.setter(object, key, value)

    def __len__(self):
        return len(self.col)

    def __nonzero__(self):
        # Python 2 truth-value hook.
        if self.col:
            return True
        else:
            return False

    def __getitem__(self, key):
        return self._get(self.col[key])

    def __setitem__(self, key, value):
        if key in self.col:
            self._set(self.col[key], key, value)
        else:
            self.col[key] = self._create(key, value)

    def __delitem__(self, key):
        del self.col[key]

    def __contains__(self, key):
        # testlib.pragma exempt:__hash__
        return key in self.col

    def has_key(self, key):
        # testlib.pragma exempt:__hash__
        return key in self.col

    def __iter__(self):
        return self.col.iterkeys()

    def clear(self):
        self.col.clear()

    # Comparisons are delegated to a materialized plain dict of the
    # proxied contents.
    def __eq__(self, other):
        return dict(self) == other

    def __ne__(self, other):
        return dict(self) != other

    def __lt__(self, other):
        return dict(self) < other

    def __le__(self, other):
        return dict(self) <= other

    def __gt__(self, other):
        return dict(self) > other

    def __ge__(self, other):
        return dict(self) >= other

    def __cmp__(self, other):
        # Python 2 three-way comparison support.
        return cmp(dict(self), other)

    def __repr__(self):
        return repr(dict(self.items()))

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def setdefault(self, key, default=None):
        if key not in self.col:
            self.col[key] = self._create(key, default)
            return default
        else:
            return self[key]

    def keys(self):
        return self.col.keys()

    def iterkeys(self):
        return self.col.iterkeys()

    def values(self):
        return [self._get(member) for member in self.col.values()]

    def itervalues(self):
        for key in self.col:
            yield self._get(self.col[key])
        # legacy Python 2 generator-termination idiom
        raise StopIteration

    def items(self):
        return [(k, self._get(self.col[k])) for k in self]

    def iteritems(self):
        for key in self.col:
            yield (key, self._get(self.col[key]))
        # legacy Python 2 generator-termination idiom
        raise StopIteration

    def pop(self, key, default=_NotProvided):
        # _NotProvided sentinel distinguishes "no default" from default=None.
        if default is _NotProvided:
            member = self.col.pop(key)
        else:
            member = self.col.pop(key, default)
        return self._get(member)

    def popitem(self):
        item = self.col.popitem()
        return (item[0], self._get(item[1]))

    def update(self, *a, **kw):
        if len(a) > 1:
            raise TypeError('update expected at most 1 arguments, got %i' %
                            len(a))
        elif len(a) == 1:
            seq_or_map = a[0]
            for item in seq_or_map:
                if isinstance(item, tuple):
                    self[item[0]] = item[1]
                else:
                    self[item] = seq_or_map[item]

        # Fixed: the original ``for key, value in kw:`` iterated keyword
        # names only and mis-unpacked them (ValueError for any key whose
        # length != 2).  items() yields the intended (key, value) pairs.
        for key, value in kw.items():
            self[key] = value

    def copy(self):
        return dict(self.items())

    def __hash__(self):
        raise TypeError("%s objects are unhashable" % type(self).__name__)

    # Borrow dict's docstrings for undocumented proxy methods; executes in
    # the class namespace at class-definition time (Python 2 ``func_name``).
    for func_name, func in locals().items():
        if (util.callable(func) and func.func_name == func_name
            and not func.__doc__ and hasattr(dict, func_name)):
            func.__doc__ = getattr(dict, func_name).__doc__
    del func_name, func