def prepare_tat(self):
    """Prepare a 'tat' (turnaround-time) query.

    Builds a single windowed subquery (stored on ``self.main_subquery``)
    in which each event row carries the delta between its own date and
    the date of the *first* event of its partition.

    Returns:
        True once ``self.main_subquery`` has been prepared.
    """
    # Resolve the event-model columns from QBEventsConfig, falling back
    # to conventional attribute names.
    partition_field = getattr(self.eventmodel,
                              self.QBEventsConfig.get('partition_field', 'partition_field'))
    event_type_field = getattr(self.eventmodel,
                               self.QBEventsConfig.get('event_type', 'event_type'))
    event_date_field = getattr(self.eventmodel,
                               self.QBEventsConfig.get('event_date', 'event_date'))

    # Collect the event types named by the starting/ending choices; the
    # main subquery is restricted to those types only.  (The original
    # code also built per-subquery `_literal_as_column(...).in_(ec)`
    # filter lists here, but they were never applied to any query —
    # dead locals, removed.)
    event_filters = []
    for param in ('starting_event_choice', 'ending_event_choice'):
        if param in self.query_parameters:
            ec = self.query_parameters.get(param)
            if not isinstance(ec, (list, tuple)):
                ec = [ec]
            event_filters.extend(ec)

    if event_filters:
        # Replace the bare list of type values with a single IN() clause.
        event_filters = [event_type_field.in_(event_filters)]
    event_filters.extend(self.event_date_filters)

    # Delta between each event's date and the first event date within
    # its partition (window 'o': PARTITION BY partition_field ORDER BY
    # event_date).
    self.main_subquery = self.session.query(
        (event_date_field - OverWindow(
            event_date_field,
            windowfunc='first_value',
            windowname=WindowClause(None, 'o', [partition_field], [event_date_field])
        )).label('main_event_delta'),
        partition_field.label('main_partition_field'),
        event_type_field.label('main_event_type'),
    ).filter(and_(*event_filters))
    return True
def __determine_fks(self):
    """Determine the foreign-key columns of this relation.

    Populates ``self.foreign_keys``, ``self._opposite_side``,
    ``self.synchronize_pairs`` and ``self.secondary_synchronize_pairs``
    from either the explicit ``_local_remote_pairs`` argument or by
    analyzing the primary/secondary join conditions.
    """
    # Legacy 'foreignkey' argument takes effect only for non-self-referential relations.
    if self._legacy_foreignkey and not self._refers_to_parent_table():
        self.foreign_keys = self._legacy_foreignkey
    # Coerce the user-supplied foreign_keys into column expressions.
    arg_foreign_keys = util.Set([expression._literal_as_column(x) for x in util.to_set(self.foreign_keys)])
    if self._arg_local_remote_pairs:
        # Explicit (local, remote) pairs: foreign_keys is mandatory so we
        # can tell which side of each pair is the foreign one.
        if not arg_foreign_keys:
            raise exceptions.ArgumentError("foreign_keys argument is required with _local_remote_pairs argument")
        self.foreign_keys = util.OrderedSet(arg_foreign_keys)
        self._opposite_side = util.OrderedSet()
        for l, r in self._arg_local_remote_pairs:
            # Whichever element of the pair is NOT the foreign key is the "opposite side".
            if r in self.foreign_keys:
                self._opposite_side.add(l)
            elif l in self.foreign_keys:
                self._opposite_side.add(r)
        # NOTE(review): zip() returns an iterator on Python 3; this module
        # appears to target Python 2 (util.Set) — confirm before porting.
        self.synchronize_pairs = zip(self._opposite_side, self.foreign_keys)
    else:
        # Derive (local, foreign) pairs from the primaryjoin condition.
        eq_pairs = criterion_as_pairs(self.primaryjoin, consider_as_foreign_keys=arg_foreign_keys, any_operator=self.viewonly)
        # Keep only pairs whose columns are mapped, or whose right side was
        # explicitly named as a foreign key.
        eq_pairs = [(l, r) for l, r in eq_pairs
                    if (self.__col_is_part_of_mappings(l) and self.__col_is_part_of_mappings(r))
                    or r in arg_foreign_keys]
        if not eq_pairs:
            # No usable pairs: raise the most specific error we can.
            if not self.viewonly and criterion_as_pairs(self.primaryjoin, consider_as_foreign_keys=arg_foreign_keys, any_operator=True):
                raise exceptions.ArgumentError("Could not locate any equated, locally mapped column pairs for primaryjoin condition '%s' on relation %s. "
                    "For more relaxed rules on join conditions, the relation may be marked as viewonly=True." % (self.primaryjoin, self))
            else:
                if arg_foreign_keys:
                    raise exceptions.ArgumentError("Could not determine relation direction for primaryjoin condition '%s', on relation %s. "
                        "Specify _local_remote_pairs=[(local, remote), (local, remote), ...] to explicitly establish the local/remote column pairs." % (self.primaryjoin, self))
                else:
                    raise exceptions.ArgumentError("Could not determine relation direction for primaryjoin condition '%s', on relation %s. "
                        "Specify the foreign_keys argument to indicate which columns on the relation are foreign." % (self.primaryjoin, self))
        self.foreign_keys = util.OrderedSet([r for l, r in eq_pairs])
        self._opposite_side = util.OrderedSet([l for l, r in eq_pairs])
        self.synchronize_pairs = eq_pairs
    if self.secondaryjoin:
        # Same analysis for the secondary (association-table) join.
        sq_pairs = criterion_as_pairs(self.secondaryjoin, consider_as_foreign_keys=arg_foreign_keys, any_operator=self.viewonly)
        sq_pairs = [(l, r) for l, r in sq_pairs
                    if (self.__col_is_part_of_mappings(l) and self.__col_is_part_of_mappings(r))
                    or r in arg_foreign_keys]
        if not sq_pairs:
            if not self.viewonly and criterion_as_pairs(self.secondaryjoin, consider_as_foreign_keys=arg_foreign_keys, any_operator=True):
                raise exceptions.ArgumentError("Could not locate any equated, locally mapped column pairs for secondaryjoin condition '%s' on relation %s. "
                    "For more relaxed rules on join conditions, the relation may be marked as viewonly=True." % (self.secondaryjoin, self))
            else:
                raise exceptions.ArgumentError("Could not determine relation direction for secondaryjoin condition '%s', on relation %s. "
                    "Specify the foreign_keys argument to indicate which columns on the relation are foreign." % (self.secondaryjoin, self))
        self.foreign_keys.update([r for l, r in sq_pairs])
        self._opposite_side.update([l for l, r in sq_pairs])
        self.secondary_synchronize_pairs = sq_pairs
    else:
        self.secondary_synchronize_pairs = None
def _process_dependent_arguments(self):
    """Resolve relation() arguments that were given as callables and
    normalize join/ordering arguments into column expressions.

    Also warns about relations that supercede an inherited relation, and
    validates the 'delete-orphan' cascade configuration.
    """
    # accept callables for other attributes which may require deferred initialization
    for attr in ('order_by', 'primaryjoin', 'secondaryjoin', 'secondary', '_foreign_keys', 'remote_side'):
        if util.callable(getattr(self, attr)):
            setattr(self, attr, getattr(self, attr)())
    # in the case that InstrumentedAttributes were used to construct
    # primaryjoin or secondaryjoin, remove the "_orm_adapt" annotation so these
    # interact with Query in the same way as the original Table-bound Column objects
    for attr in ('primaryjoin', 'secondaryjoin'):
        val = getattr(self, attr)
        if val is not None:
            util.assert_arg_type(val, sql.ClauseElement, attr)
            setattr(self, attr, _orm_deannotate(val))
    if self.order_by:
        # Normalize order_by into a list of column expressions.
        self.order_by = [expression._literal_as_column(x) for x in util.to_list(self.order_by)]
    # Coerce foreign_keys / remote_side entries into column sets.
    self._foreign_keys = util.column_set(expression._literal_as_column(x) for x in util.to_column_set(self._foreign_keys))
    self.remote_side = util.column_set(expression._literal_as_column(x) for x in util.to_column_set(self.remote_side))
    if not self.parent.concrete:
        # A same-named relation on an inherited (non-concrete) mapper will be
        # shadowed by this one; warn since that can break flush ordering.
        for inheriting in self.parent.iterate_to_root():
            if inheriting is not self.parent and inheriting._get_property(self.key, raiseerr=False):
                util.warn(
                    ("Warning: relation '%s' on mapper '%s' supercedes "
                     "the same relation on inherited mapper '%s'; this "
                     "can cause dependency issues during flush") % (self.key, self.parent, inheriting))
    # TODO: remove 'self.table'
    self.target = self.table = self.mapper.mapped_table
    if self.cascade.delete_orphan:
        # delete-orphan on a self-referential relation cannot work: orphan
        # detection would apply to the parent class itself.
        if self.parent.class_ is self.mapper.class_:
            raise sa_exc.ArgumentError("In relationship '%s', can't establish 'delete-orphan' cascade "
                "rule on a self-referential relationship. "
                "You probably want cascade='all', which includes delete cascading but not orphan detection."
                %(str(self)))
        self.mapper.primary_mapper().delete_orphans.append((self.key, self.parent.class_))
def visit_select_with_window(element, compiler, **kw):
    """Compile a WindowClause into its textual window specification.

    Renders ``(PARTITION BY col, ... ORDER BY col [dir], ...)`` from the
    ``windowpartition`` / ``windowordering`` attributes of *element*;
    either part is omitted when its attribute is empty.

    Returns:
        str: the parenthesized window specification.
    """
    def repr_ordering_column(e):
        # A (expression, direction) tuple renders as "col ASC"/"col DESC";
        # a bare expression renders as just the column text.
        if isinstance(e, tuple):
            return "%s %s" % (_literal_as_column(e[0]).label(''), e[1])
        return str(_literal_as_column(e).label(''))

    partition = ordering = ""
    if element.windowpartition:
        partition = "PARTITION BY %s" % ", ".join(
            [str(_literal_as_column(e).label('')) for e in element.windowpartition])
    if element.windowordering:
        ordering = "ORDER BY %s" % ", ".join(
            [repr_ordering_column(e) for e in element.windowordering])
    # Join only the clauses that are present; the previous
    # `" ".join([partition, ordering,])` emitted stray leading/trailing
    # spaces in the SQL whenever one clause was empty.
    return "(%s)" % " ".join(c for c in (partition, ordering) if c)
def __determine_remote_side(self):
    """Determine which columns are 'local' vs 'remote' for this relation.

    Populates ``self.local_side``, ``self.remote_side`` and
    ``self.local_remote_pairs`` from explicit ``_local_remote_pairs``,
    from the ``remote_side`` argument, or by analyzing the join conditions.
    """
    if self._arg_local_remote_pairs:
        # Explicit pairs were supplied; a separate remote_side is redundant.
        if self.remote_side:
            raise exceptions.ArgumentError("remote_side argument is redundant against more detailed _local_remote_side argument.")
        if self.direction is MANYTOONE:
            # Flip (local, remote) so the common unpacking logic below applies.
            eq_pairs = [(r, l) for l, r in self._arg_local_remote_pairs]
        else:
            eq_pairs = self._arg_local_remote_pairs
    elif self.remote_side:
        # Coerce remote_side into column expressions and derive the pairs
        # from the primaryjoin against that set.
        remote_side = util.Set([expression._literal_as_column(x) for x in util.to_set(self.remote_side)])
        if self.direction is MANYTOONE:
            eq_pairs = criterion_as_pairs(self.primaryjoin, consider_as_referenced_keys=remote_side, any_operator=True)
        else:
            eq_pairs = criterion_as_pairs(self.primaryjoin, consider_as_foreign_keys=remote_side, any_operator=True)
    else:
        if self.viewonly:
            # viewonly relations reuse the already-computed synchronize pairs.
            eq_pairs = self.synchronize_pairs
        else:
            eq_pairs = criterion_as_pairs(self.primaryjoin, consider_as_foreign_keys=self.foreign_keys, any_operator=True)
            if self.secondaryjoin:
                sq_pairs = criterion_as_pairs(self.secondaryjoin, consider_as_foreign_keys=self.foreign_keys, any_operator=True)
                eq_pairs += sq_pairs
            # Discard pairs with unmapped columns on either side.
            eq_pairs = [(l, r) for l, r in eq_pairs if self.__col_is_part_of_mappings(l) and self.__col_is_part_of_mappings(r)]
    if self.direction is MANYTOONE:
        self.remote_side, self.local_side = [util.OrderedSet(s) for s in zip(*eq_pairs)]
        self.local_remote_pairs = [(r, l) for l, r in eq_pairs]
    else:
        self.local_side, self.remote_side = [util.OrderedSet(s) for s in zip(*eq_pairs)]
        self.local_remote_pairs = eq_pairs
    # Validate that the mapped side of the relation actually belongs to a mapping.
    if self.direction is ONETOMANY:
        for l in self.local_side:
            if not self.__col_is_part_of_mappings(l):
                raise exceptions.ArgumentError("Local column '%s' is not part of mapping %s. Specify remote_side argument to indicate which column lazy join condition should compare against." % (l, self.parent))
    elif self.direction is MANYTOONE:
        for r in self.remote_side:
            if not self.__col_is_part_of_mappings(r):
                raise exceptions.ArgumentError("Remote column '%s' is not part of mapping %s. Specify remote_side argument to indicate which column lazy join condition should bind." % (r, self.mapper))
def __determine_remote_side(self):
    """Determine which columns are 'local' vs 'remote' for this relation.

    Populates ``self.local_side``, ``self.remote_side`` and
    ``self.local_remote_pairs`` from explicit ``_local_remote_pairs``,
    from the ``remote_side`` argument, or by analyzing the join conditions.
    """
    if self._arg_local_remote_pairs:
        # Explicit pairs were supplied; a separate remote_side is redundant.
        if self.remote_side:
            raise exceptions.ArgumentError(
                "remote_side argument is redundant against more detailed _local_remote_side argument."
            )
        if self.direction is MANYTOONE:
            # Flip (local, remote) so the common unpacking logic below applies.
            eq_pairs = [(r, l) for l, r in self._arg_local_remote_pairs]
        else:
            eq_pairs = self._arg_local_remote_pairs
    elif self.remote_side:
        # Coerce remote_side into column expressions and derive the pairs
        # from the primaryjoin against that set.
        remote_side = util.Set([
            expression._literal_as_column(x) for x in util.to_set(self.remote_side)
        ])
        if self.direction is MANYTOONE:
            eq_pairs = criterion_as_pairs(
                self.primaryjoin, consider_as_referenced_keys=remote_side, any_operator=True)
        else:
            eq_pairs = criterion_as_pairs(
                self.primaryjoin, consider_as_foreign_keys=remote_side, any_operator=True)
    else:
        if self.viewonly:
            # viewonly relations reuse the already-computed synchronize pairs.
            eq_pairs = self.synchronize_pairs
        else:
            eq_pairs = criterion_as_pairs(
                self.primaryjoin, consider_as_foreign_keys=self.foreign_keys, any_operator=True)
            if self.secondaryjoin:
                sq_pairs = criterion_as_pairs(
                    self.secondaryjoin, consider_as_foreign_keys=self.foreign_keys, any_operator=True)
                eq_pairs += sq_pairs
            # Discard pairs with unmapped columns on either side.
            eq_pairs = [(l, r) for l, r in eq_pairs
                        if self.__col_is_part_of_mappings(l) and self.__col_is_part_of_mappings(r)]
    if self.direction is MANYTOONE:
        self.remote_side, self.local_side = [
            util.OrderedSet(s) for s in zip(*eq_pairs)
        ]
        self.local_remote_pairs = [(r, l) for l, r in eq_pairs]
    else:
        self.local_side, self.remote_side = [
            util.OrderedSet(s) for s in zip(*eq_pairs)
        ]
        self.local_remote_pairs = eq_pairs
    # Validate that the mapped side of the relation actually belongs to a mapping.
    if self.direction is ONETOMANY:
        for l in self.local_side:
            if not self.__col_is_part_of_mappings(l):
                raise exceptions.ArgumentError(
                    "Local column '%s' is not part of mapping %s. Specify remote_side argument to indicate which column lazy join condition should compare against." % (l, self.parent))
    elif self.direction is MANYTOONE:
        for r in self.remote_side:
            if not self.__col_is_part_of_mappings(r):
                raise exceptions.ArgumentError(
                    "Remote column '%s' is not part of mapping %s. Specify remote_side argument to indicate which column lazy join condition should bind." % (r, self.mapper))
def __determine_fks(self):
    """Determine the foreign-key columns of this relation.

    Populates ``self.foreign_keys``, ``self._opposite_side``,
    ``self.synchronize_pairs`` and ``self.secondary_synchronize_pairs``
    from either the explicit ``_local_remote_pairs`` argument or by
    analyzing the primary/secondary join conditions.
    """
    # Legacy 'foreignkey' argument takes effect only for non-self-referential relations.
    if self._legacy_foreignkey and not self._refers_to_parent_table():
        self.foreign_keys = self._legacy_foreignkey
    # Coerce the user-supplied foreign_keys into column expressions.
    arg_foreign_keys = util.Set([
        expression._literal_as_column(x) for x in util.to_set(self.foreign_keys)
    ])
    if self._arg_local_remote_pairs:
        # Explicit (local, remote) pairs: foreign_keys is mandatory so we
        # can tell which side of each pair is the foreign one.
        if not arg_foreign_keys:
            raise exceptions.ArgumentError(
                "foreign_keys argument is required with _local_remote_pairs argument"
            )
        self.foreign_keys = util.OrderedSet(arg_foreign_keys)
        self._opposite_side = util.OrderedSet()
        for l, r in self._arg_local_remote_pairs:
            # Whichever element of the pair is NOT the foreign key is the "opposite side".
            if r in self.foreign_keys:
                self._opposite_side.add(l)
            elif l in self.foreign_keys:
                self._opposite_side.add(r)
        # NOTE(review): zip() returns an iterator on Python 3; this module
        # appears to target Python 2 (util.Set) — confirm before porting.
        self.synchronize_pairs = zip(self._opposite_side, self.foreign_keys)
    else:
        # Derive (local, foreign) pairs from the primaryjoin condition.
        eq_pairs = criterion_as_pairs(
            self.primaryjoin, consider_as_foreign_keys=arg_foreign_keys, any_operator=self.viewonly)
        # Keep only pairs whose columns are mapped, or whose right side was
        # explicitly named as a foreign key.
        eq_pairs = [
            (l, r) for l, r in eq_pairs
            if (self.__col_is_part_of_mappings(l) and self.__col_is_part_of_mappings(r))
            or r in arg_foreign_keys
        ]
        if not eq_pairs:
            # No usable pairs: raise the most specific error we can.
            if not self.viewonly and criterion_as_pairs(
                    self.primaryjoin, consider_as_foreign_keys=arg_foreign_keys, any_operator=True):
                raise exceptions.ArgumentError(
                    "Could not locate any equated, locally mapped column pairs for primaryjoin condition '%s' on relation %s. "
                    "For more relaxed rules on join conditions, the relation may be marked as viewonly=True."
                    % (self.primaryjoin, self))
            else:
                if arg_foreign_keys:
                    raise exceptions.ArgumentError(
                        "Could not determine relation direction for primaryjoin condition '%s', on relation %s. "
                        "Specify _local_remote_pairs=[(local, remote), (local, remote), ...] to explicitly establish the local/remote column pairs."
                        % (self.primaryjoin, self))
                else:
                    raise exceptions.ArgumentError(
                        "Could not determine relation direction for primaryjoin condition '%s', on relation %s. "
                        "Specify the foreign_keys argument to indicate which columns on the relation are foreign."
                        % (self.primaryjoin, self))
        self.foreign_keys = util.OrderedSet([r for l, r in eq_pairs])
        self._opposite_side = util.OrderedSet([l for l, r in eq_pairs])
        self.synchronize_pairs = eq_pairs
    if self.secondaryjoin:
        # Same analysis for the secondary (association-table) join.
        sq_pairs = criterion_as_pairs(
            self.secondaryjoin, consider_as_foreign_keys=arg_foreign_keys, any_operator=self.viewonly)
        sq_pairs = [
            (l, r) for l, r in sq_pairs
            if (self.__col_is_part_of_mappings(l) and self.__col_is_part_of_mappings(r))
            or r in arg_foreign_keys
        ]
        if not sq_pairs:
            if not self.viewonly and criterion_as_pairs(
                    self.secondaryjoin, consider_as_foreign_keys=arg_foreign_keys, any_operator=True):
                raise exceptions.ArgumentError(
                    "Could not locate any equated, locally mapped column pairs for secondaryjoin condition '%s' on relation %s. "
                    "For more relaxed rules on join conditions, the relation may be marked as viewonly=True."
                    % (self.secondaryjoin, self))
            else:
                raise exceptions.ArgumentError(
                    "Could not determine relation direction for secondaryjoin condition '%s', on relation %s. "
                    "Specify the foreign_keys argument to indicate which columns on the relation are foreign."
                    % (self.secondaryjoin, self))
        self.foreign_keys.update([r for l, r in sq_pairs])
        self._opposite_side.update([l for l, r in sq_pairs])
        self.secondary_synchronize_pairs = sq_pairs
    else:
        self.secondary_synchronize_pairs = None
def _determine_targets(self):
    """Resolve the target mapper for this relation, then resolve deferred
    (callable) arguments, normalize join/ordering arguments, and validate
    inheritance and cascade configuration.
    """
    # The relation() 'argument' may be a class, a Mapper, or a callable
    # returning a class (to allow deferred configuration).
    if isinstance(self.argument, type):
        self.mapper = mapper.class_mapper(self.argument, compile=False)
    elif isinstance(self.argument, mapper.Mapper):
        self.mapper = self.argument
    elif util.callable(self.argument):
        # accept a callable to suit various deferred-configurational schemes
        self.mapper = mapper.class_mapper(self.argument(), compile=False)
    else:
        raise sa_exc.ArgumentError(
            "relation '%s' expects a class or a mapper argument (received: %s)"
            % (self.key, type(self.argument))
        )
    assert isinstance(self.mapper, mapper.Mapper), self.mapper
    # accept callables for other attributes which may require deferred initialization
    for attr in ("order_by", "primaryjoin", "secondaryjoin", "secondary", "_foreign_keys", "remote_side"):
        if util.callable(getattr(self, attr)):
            setattr(self, attr, getattr(self, attr)())
    # in the case that InstrumentedAttributes were used to construct
    # primaryjoin or secondaryjoin, remove the "_orm_adapt" annotation so these
    # interact with Query in the same way as the original Table-bound Column objects
    for attr in ("primaryjoin", "secondaryjoin"):
        val = getattr(self, attr)
        if val is not None:
            util.assert_arg_type(val, sql.ClauseElement, attr)
            setattr(self, attr, _orm_deannotate(val))
    if self.order_by:
        # Normalize order_by into a list of column expressions.
        self.order_by = [expression._literal_as_column(x) for x in util.to_list(self.order_by)]
    # Coerce foreign_keys / remote_side entries into column sets.
    self._foreign_keys = util.column_set(
        expression._literal_as_column(x) for x in util.to_column_set(self._foreign_keys)
    )
    self.remote_side = util.column_set(
        expression._literal_as_column(x) for x in util.to_column_set(self.remote_side)
    )
    if not self.parent.concrete:
        # A same-named relation on an inherited (non-concrete) mapper will be
        # shadowed by this one; warn since that can break flush ordering.
        for inheriting in self.parent.iterate_to_root():
            if inheriting is not self.parent and inheriting._get_property(self.key, raiseerr=False):
                util.warn(
                    (
                        "Warning: relation '%s' on mapper '%s' supercedes "
                        "the same relation on inherited mapper '%s'; this "
                        "can cause dependency issues during flush"
                    ) % (self.key, self.parent, inheriting)
                )
    # TODO: remove 'self.table'
    self.target = self.table = self.mapper.mapped_table
    if self.cascade.delete_orphan:
        # delete-orphan on a self-referential relation cannot work: orphan
        # detection would apply to the parent class itself.
        if self.parent.class_ is self.mapper.class_:
            raise sa_exc.ArgumentError(
                "In relationship '%s', can't establish 'delete-orphan' cascade "
                "rule on a self-referential relationship. "
                "You probably want cascade='all', which includes delete cascading but not orphan detection."
                % (str(self))
            )
        self.mapper.primary_mapper().delete_orphans.append((self.key, self.parent.class_))
def repr_ordering_column(e):
    """Render an ordering element as SQL text.

    A ``(expression, direction)`` tuple becomes ``"col ASC"``/``"col DESC"``;
    a bare expression becomes just its rendered column text.
    """
    if not isinstance(e, tuple):
        # Bare expression: no direction suffix.
        return str(_literal_as_column(e).label(''))
    rendered = _literal_as_column(e[0]).label('')
    return "%s %s" % (rendered, e[1])
def prepare_query_event_delta(self):
    """Prepare an 'event_delta' query.

    Builds a windowed subquery pipeline that computes, per partition
    entity, the delta between the first event of one type (the
    'starting_event_choice') and the last event of another type (the
    'ending_event_choice').  The result is stored on ``self.events_query``
    (as a subquery) for later use by build_events_query.

    Returns:
        True once the query has been prepared.
    """
    # We need to know the 'event model' fields. Grab them from QBEventsConfig,
    # falling back to conventional attribute names.
    partition_field = getattr(self.eventmodel, self.QBEventsConfig.get('partition_field', 'partition_field'))
    event_type_field = getattr(self.eventmodel, self.QBEventsConfig.get('event_type', 'event_type'))
    event_date_field = getattr(self.eventmodel, self.QBEventsConfig.get('event_date', 'event_date'))

    # This 'main_subquery' calculates the delta between the 'current' event
    # and the first event for the current 'partition_field' entity
    # (window 'o': PARTITION BY partition_field ORDER BY event_date).
    self.main_subquery = self.session.query(
        partition_field.label('main_partition_field'),
        event_type_field.label('main_event_type'),
        event_date_field.label('main_event_date'),
        (event_date_field - OverWindow(
            event_date_field,
            windowfunc='first_value',
            windowname=WindowClause(None, 'o', [partition_field], [event_date_field])
        )).label('main_event_delta')
    ).filter(and_(*self.event_date_filters)).subquery()

    # Prepare the two window clauses: both partition on (entity, event type);
    # 'first' orders ascending by date, 'last' descending (so first_value over
    # the 'last' window yields the most recent event of the type).
    first_event_window = WindowClause(
        None, 'first',
        [_literal_as_column('main_partition_field'), _literal_as_column('main_event_type')],
        [(_literal_as_column('main_event_date'), 'ASC')])
    last_event_window = WindowClause(
        None, 'last',
        [_literal_as_column('main_partition_field'), _literal_as_column('main_event_type')],
        [(_literal_as_column('main_event_date'), 'DESC')])

    # Build the two subqueries (one for 'first event of this type' and the
    # other for 'last event of this type').
    first_subq_filters = []
    if 'starting_event_choice' in self.query_parameters:
        ec = self.query_parameters.get('starting_event_choice')
        if not isinstance(ec, (list, tuple)):
            ec = [ec,]
        first_subq_filters.append(_literal_as_column('main_event_type').in_(ec))
    first_subq = self.session.query(
        OverWindow(_literal_as_column('main_partition_field'), windowfunc='first_value',
                   windowname=first_event_window).label('first_event_partition_field'),
        OverWindow(_literal_as_column('main_event_type'), windowfunc='first_value',
                   windowname=first_event_window).label('first_event_type'),
        OverWindow(_literal_as_column('main_event_date'), windowfunc='first_value',
                   windowname=first_event_window).label('first_event_date'),
        OverWindow(_literal_as_column('main_event_delta'), windowfunc='first_value',
                   windowname=first_event_window).label('first_event_delta'),
    ).select_from(self.main_subquery).filter(and_(*first_subq_filters)).distinct().subquery()

    last_subq_filters = []
    if 'ending_event_choice' in self.query_parameters:
        ec = self.query_parameters.get('ending_event_choice')
        if not isinstance(ec, (list, tuple)):
            ec = [ec,]
        last_subq_filters.append(_literal_as_column('main_event_type').in_(ec))
    last_subq = self.session.query(
        OverWindow(_literal_as_column('main_partition_field'), windowfunc='first_value',
                   windowname=last_event_window).label('last_event_partition_field'),
        OverWindow(_literal_as_column('main_event_type'), windowfunc='first_value',
                   windowname=last_event_window).label('last_event_type'),
        OverWindow(_literal_as_column('main_event_date'), windowfunc='first_value',
                   windowname=last_event_window).label('last_event_date'),
        OverWindow(_literal_as_column('main_event_delta'), windowfunc='first_value',
                   windowname=last_event_window).label('last_event_delta'),
    ).select_from(self.main_subquery).filter(and_(*last_subq_filters)).distinct().subquery()

    # Join the two on the partition entity.
    joined = first_subq.join(
        last_subq,
        _literal_as_column('first_event_partition_field') == _literal_as_column('last_event_partition_field'))

    # And generate the events_query that calculates the delta between the first
    # event of a type and the last event of another type.
    self.events_query = self.session.query(
        _literal_as_column('first_event_partition_field'),
        _literal_as_column('first_event_date'),
        _literal_as_column('last_event_date'),
        _literal_as_column('first_event_type'),
        _literal_as_column('last_event_type'),
        _literal_as_column('first_event_delta'),
        _literal_as_column('last_event_delta'),
        (_literal_as_column('last_event_delta') - _literal_as_column('first_event_delta')).label('deltas_delta')
    ).select_from(joined)
    # We only keep values > 0 because we don't want to reverse time.
    # NOTE(review): the comparison is against the string '0', not the int 0 —
    # presumably relies on the dialect coercing it; confirm against the target DB.
    self.events_query = self.events_query.filter(
        (_literal_as_column('last_event_delta') - _literal_as_column('first_event_delta')) > '0'
    ).order_by('last_event_date').subquery()
    self.select.insert(0, partition_field)  # We HAVE TO select this field
    return True