def test_mutators_against_iter(self):
        # test set operations taking an iterator as the other operand
        o = util.OrderedSet([3, 2, 4, 5])

        eq_(o.difference(iter([3, 4])), util.OrderedSet([2, 5]))
        eq_(o.intersection(iter([3, 4, 6])), util.OrderedSet([3, 4]))
        eq_(o.union(iter([3, 4, 6])), util.OrderedSet([2, 3, 4, 5, 6]))
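
A quick standalone illustration (not taken from the test suite) of what util.OrderedSet adds over the built-in set, which is the behavior the snippets below rely on: iteration follows insertion order while the usual set operations still apply.

from sqlalchemy import util

o = util.OrderedSet([3, 2, 4, 5])
o.add(1)
print(list(o))                           # [3, 2, 4, 5, 1] -- insertion order preserved
print(list(o.difference(iter([3, 4]))))  # [2, 5, 1]
print(list(o.union(iter([6]))))          # [3, 2, 4, 5, 1, 6]
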
    def _revision_map(self):
        """memoized attribute, initializes the revision map from the
        initial collection.

        """
        map_ = {}

        heads = sqlautil.OrderedSet()
        _real_heads = sqlautil.OrderedSet()
        self.bases = ()
        self._real_bases = ()

        has_branch_labels = set()
        has_depends_on = set()
        for revision in self._generator():

            if revision.revision in map_:
                util.warn(
                    "Revision %s is present more than once" % revision.revision
                )
            map_[revision.revision] = revision
            if revision.branch_labels:
                has_branch_labels.add(revision)
            if revision.dependencies:
                has_depends_on.add(revision)
            heads.add(revision.revision)
            _real_heads.add(revision.revision)
            if revision.is_base:
                self.bases += (revision.revision,)
            if revision._is_real_base:
                self._real_bases += (revision.revision,)

        # add the branch_labels to the map_.  We'll need these
        # to resolve the dependencies.
        for revision in has_branch_labels:
            self._map_branch_labels(revision, map_)

        for revision in has_depends_on:
            self._add_depends_on(revision, map_)

        for rev in map_.values():
            for downrev in rev._all_down_revisions:
                if downrev not in map_:
                    util.warn(
                        "Revision %s referenced from %s is not present"
                        % (downrev, rev)
                    )
                down_revision = map_[downrev]
                down_revision.add_nextrev(rev)
                if downrev in rev._versioned_down_revisions:
                    heads.discard(downrev)
                _real_heads.discard(downrev)

        map_[None] = map_[()] = None
        self.heads = tuple(heads)
        self._real_heads = tuple(_real_heads)

        for revision in has_branch_labels:
            self._add_branches(revision, map_, map_branch_labels=False)
        return map_
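
The loop above derives the heads by starting with every revision and discarding anything that some other revision points down to. A toy version of that idea, using hypothetical revision ids in place of real Revision objects:

from sqlalchemy import util as sqlautil

# hypothetical down-revision graph: "a" is the base, "b" follows it,
# and the history then branches into "c" and "d"
down_revisions = {"a": (), "b": ("a",), "c": ("b",), "d": ("b",)}

heads = sqlautil.OrderedSet(down_revisions)   # every revision starts as a head
for downs in down_revisions.values():
    for downrev in downs:
        heads.discard(downrev)                # anything referenced below is not a tip

print(list(heads))  # ['c', 'd'] -- only the branch tips remain
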
Example #3
def polymorphic_union(table_map, typecolname, aliasname='p_union', cast_nulls=True):
    """Create a ``UNION`` statement used by a polymorphic mapper.

    See  :ref:`concrete_inheritance` for an example of how
    this is used.

    :param table_map: mapping of polymorphic identities to
     :class:`.Table` objects.
    :param typecolname: string name of a "discriminator" column, which will be
     derived from the query, producing the polymorphic identity for each row.  If
     ``None``, no polymorphic discriminator is generated.
    :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()`
     construct generated.
    :param cast_nulls: if True, non-existent columns, which are represented as labeled
     NULLs, will be passed into CAST.   This is a legacy behavior that is problematic
     on some backends such as Oracle - in which case it can be set to False.

    """

    colnames = util.OrderedSet()
    colnamemaps = {}
    types = {}
    for key in list(table_map.keys()):
        table = table_map[key]

        # mysql doesn't like selecting from a select;
        # make it an alias of the select
        if isinstance(table, sql.Select):
            table = table.alias()
            table_map[key] = table

        m = {}
        for c in table.c:
            colnames.add(c.key)
            m[c.key] = c
            types[c.key] = c.type
        colnamemaps[table] = m

    def col(name, table):
        try:
            return colnamemaps[table][name]
        except KeyError:
            if cast_nulls:
                return sql.cast(sql.null(), types[name]).label(name)
            else:
                return sql.type_coerce(sql.null(), types[name]).label(name)

    result = []
    for type, table in table_map.items():
        if typecolname is not None:
            result.append(
                    sql.select([col(name, table) for name in colnames] +
                    [sql.literal_column(sql_util._quote_ddl_expr(type)).
                            label(typecolname)],
                             from_obj=[table]))
        else:
            result.append(sql.select([col(name, table) for name in colnames],
                                     from_obj=[table]))
    return sql.union_all(*result).alias(aliasname)
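
A hedged usage sketch for the function above, with hypothetical tables; this mirrors the classic concrete-inheritance setup. Columns missing from a given table come back as labeled NULLs, and the discriminator column carries each row's polymorphic identity.

from sqlalchemy import MetaData, Table, Column, Integer, String

metadata = MetaData()
engineers = Table(
    "engineers", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
    Column("engineer_info", String(50)),
)
managers = Table(
    "managers", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
    Column("manager_data", String(50)),
)

# one padded SELECT per table, combined with UNION ALL and aliased as "pjoin"
pjoin = polymorphic_union(
    {"engineer": engineers, "manager": managers},
    "type",
    "pjoin",
)
print(pjoin.c.keys())  # includes id, name, engineer_info, manager_data, type
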
Example #4
    # determine which columns are "remote" vs. "local" for this relation,
    # either from an explicit remote_side argument or from the join conditions
    def __determine_remote_side(self):
        if self.remote_side:
            if self.direction is MANYTOONE:
                eq_pairs = criterion_as_pairs(self.primaryjoin, consider_as_referenced_keys=self.remote_side, any_operator=True)
            else:
                eq_pairs = criterion_as_pairs(self.primaryjoin, consider_as_foreign_keys=self.remote_side, any_operator=True)

            if self.secondaryjoin:
                sq_pairs = criterion_as_pairs(self.secondaryjoin, consider_as_foreign_keys=self.foreign_keys, any_operator=True)
                sq_pairs = [(l, r) for l, r in sq_pairs if self.__col_is_part_of_mappings(l) and self.__col_is_part_of_mappings(r)]
                eq_pairs += sq_pairs
        else:
            eq_pairs = zip(self._opposite_side, self.foreign_keys)

        if self.direction is MANYTOONE:
            self.remote_side, self.local_side = [util.OrderedSet(s) for s in zip(*eq_pairs)]
        else:
            self.local_side, self.remote_side = [util.OrderedSet(s) for s in zip(*eq_pairs)]
Example #5
    def __init__(self, identity_map=None):
        if identity_map is not None:
            self.identity_map = identity_map
        else:
            self.identity_map = weakref.WeakValueDictionary()

        self.new = util.OrderedSet()
        self.dirty = util.Set()

        self.deleted = util.Set()
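
A side note on the default above (not part of the original snippet): a WeakValueDictionary only keeps an entry while the corresponding object is referenced elsewhere, which is what makes it usable as an identity map. The class and key below are hypothetical.

import weakref

class Record:
    pass

identity_map = weakref.WeakValueDictionary()
obj = Record()
identity_map[("Record", 1)] = obj
print(len(identity_map))  # 1

del obj
print(len(identity_map))  # 0 under CPython -- the entry went away with the object
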
Example #6
    # determine which columns in the join conditions are "foreign", i.e. the
    # columns whose values are synchronized from the opposite side
    def __determine_fks(self):
        if self._legacy_foreignkey and not self._refers_to_parent_table():
            self.foreign_keys = self._legacy_foreignkey

        arg_foreign_keys = self.foreign_keys
        
        eq_pairs = criterion_as_pairs(self.primaryjoin, consider_as_foreign_keys=arg_foreign_keys, any_operator=self.viewonly)
        eq_pairs = [(l, r) for l, r in eq_pairs if self.__col_is_part_of_mappings(l) and self.__col_is_part_of_mappings(r)]

        if not eq_pairs:
            if not self.viewonly and criterion_as_pairs(self.primaryjoin, consider_as_foreign_keys=arg_foreign_keys, any_operator=True):
                raise exceptions.ArgumentError("Could not locate any equated column pairs for primaryjoin condition '%s' on relation %s. "
                    "If no equated pairs exist, the relation must be marked as viewonly=True." % (self.primaryjoin, self)
                )
            else:
                raise exceptions.ArgumentError("Could not determine relation direction for primaryjoin condition '%s', on relation %s. "
                "Specify the foreign_keys argument to indicate which columns on the relation are foreign." % (self.primaryjoin, self))
        
        self.foreign_keys = util.OrderedSet([r for l, r in eq_pairs])
        self._opposite_side = util.OrderedSet([l for l, r in eq_pairs])
        self.synchronize_pairs = eq_pairs
        
        if self.secondaryjoin:
            sq_pairs = criterion_as_pairs(self.secondaryjoin, consider_as_foreign_keys=arg_foreign_keys)
            sq_pairs = [(l, r) for l, r in sq_pairs if self.__col_is_part_of_mappings(l) and self.__col_is_part_of_mappings(r)]
            
            if not sq_pairs:
                if not self.viewonly and criterion_as_pairs(self.secondaryjoin, consider_as_foreign_keys=arg_foreign_keys, any_operator=True):
                    raise exceptions.ArgumentError("Could not locate any equated column pairs for secondaryjoin condition '%s' on relation %s. "
                        "If no equated pairs exist, the relation must be marked as viewonly=True." % (self.secondaryjoin, self)
                    )
                else:
                    raise exceptions.ArgumentError("Could not determine relation direction for secondaryjoin condition '%s', on relation %s. "
                    "Specify the foreign_keys argument to indicate which columns on the relation are foreign." % (self.secondaryjoin, self))

            self.foreign_keys.update([r for l, r in sq_pairs])
            self._opposite_side.update([l for l, r in sq_pairs])
            self.secondary_synchronize_pairs = sq_pairs
        else:
            self.secondary_synchronize_pairs = None
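
To make the pair handling concrete, a small stand-in sketch (plain strings instead of Column objects, names hypothetical): suppose the primaryjoin is parent.id == child.parent_id and eq_pairs comes back as a single (local, foreign) pair.

from sqlalchemy import util

eq_pairs = [("parent.id", "child.parent_id")]     # stand-in (l, r) pairs
foreign_keys = util.OrderedSet([r for l, r in eq_pairs])
opposite_side = util.OrderedSet([l for l, r in eq_pairs])
synchronize_pairs = eq_pairs

print(list(foreign_keys))   # ['child.parent_id']
print(list(opposite_side))  # ['parent.id']
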
Example #7
    # replace FROM elements and raw columns with their converted equivalents,
    # recording the (original, replacement) pairs for re-correlation
    def visit_select(self, select):
        fr = util.OrderedSet()
        for elem in select._froms:
            n = self.convert_element(elem)
            if n is not None:
                fr.add((elem, n))
        select._recorrelate_froms(fr)

        col = []
        for elem in select._raw_columns:
            n = self.convert_element(elem)
            if n is None:
                col.append(elem)
            else:
                col.append(n)
        select._raw_columns = col
Example #8
def reduce_columns(columns, *clauses):
    """given a list of columns, return a 'reduced' set based on natural equivalents.

    the set is reduced to the smallest list of columns which have no natural
    equivalent present in the list.  A "natural equivalent" means that two columns
    will ultimately represent the same value because they are related by a foreign key.

    \*clauses is an optional list of join clauses which will be traversed
    to further identify columns that are "equivalent".

    This function is primarily used to determine the most minimal "primary key"
    from a selectable, by reducing the set of primary key columns present
    in the selectable to just those that are not repeated.

    """

    columns = util.OrderedSet(columns)

    omit = util.Set()
    for col in columns:
        for fk in col.foreign_keys:
            for c in columns:
                if c is col:
                    continue
                if fk.column.shares_lineage(c):
                    omit.add(col)
                    break

    if clauses:

        def visit_binary(binary):
            if binary.operator == operators.eq:
                cols = util.Set(
                    chain(*[c.proxy_set for c in columns.difference(omit)]))
                if binary.left in cols and binary.right in cols:
                    for c in columns:
                        if c.shares_lineage(binary.right):
                            omit.add(c)
                            break

        for clause in clauses:
            visitors.traverse(clause, visit_binary=visit_binary)

    return expression.ColumnSet(columns.difference(omit))
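
A small example of calling the function above with hypothetical tables: the foreign key from child.parent_id to parent.id makes those two columns natural equivalents, so only one of them survives the reduction.

from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey

metadata = MetaData()
parent = Table("parent", metadata, Column("id", Integer, primary_key=True))
child = Table(
    "child", metadata,
    Column("id", Integer, primary_key=True),
    Column("parent_id", Integer, ForeignKey("parent.id")),
)

reduced = reduce_columns([parent.c.id, child.c.id, child.c.parent_id])
print([str(c) for c in reduced])  # ['parent.id', 'child.id'] -- child.parent_id dropped
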
Example #9
    def _revision_map(self):
        """memoized attribute, initializes the revision map from the
        initial collection.

        """
        map_ = {}

        heads = sqlautil.OrderedSet()
        _real_heads = sqlautil.OrderedSet()
        bases = ()
        _real_bases = ()

        has_branch_labels = set()
        has_depends_on = set()
        for revision in self._generator():

            if revision.revision in map_:
                util.warn("Revision %s is present more than once" %
                          revision.revision)
            map_[revision.revision] = revision
            if revision.branch_labels:
                has_branch_labels.add(revision)
            if revision.dependencies:
                has_depends_on.add(revision)
            heads.add(revision)
            _real_heads.add(revision)
            if revision.is_base:
                bases += (revision, )
            if revision._is_real_base:
                _real_bases += (revision, )

        # add the branch_labels to the map_.  We'll need these
        # to resolve the dependencies.
        rev_map = map_.copy()
        for revision in has_branch_labels:
            self._map_branch_labels(revision, map_)

        for revision in has_depends_on:
            self._add_depends_on(revision, map_)

        for rev in map_.values():
            for downrev in rev._all_down_revisions:
                if downrev not in map_:
                    util.warn("Revision %s referenced from %s is not present" %
                              (downrev, rev))
                down_revision = map_[downrev]
                down_revision.add_nextrev(rev)
                if downrev in rev._versioned_down_revisions:
                    heads.discard(down_revision)
                _real_heads.discard(down_revision)

        if rev_map:
            if not heads or not bases:
                raise CycleDetected(rev_map.keys())
            total_space = {
                rev.revision
                for rev in self._iterate_related_revisions(
                    lambda r: r._versioned_down_revisions, heads, map_=rev_map)
            }.intersection(rev.revision
                           for rev in self._iterate_related_revisions(
                               lambda r: r.nextrev, bases, map_=rev_map))
            deleted_revs = set(rev_map.keys()) - total_space
            if deleted_revs:
                raise CycleDetected(sorted(deleted_revs))

            if not _real_heads or not _real_bases:
                raise DependencyCycleDetected(rev_map.keys())
            total_space = {
                rev.revision
                for rev in self._iterate_related_revisions(
                    lambda r: r._all_down_revisions, _real_heads, map_=rev_map)
            }.intersection(
                rev.revision for rev in self._iterate_related_revisions(
                    lambda r: r._all_nextrev, _real_bases, map_=rev_map))
            deleted_revs = set(rev_map.keys()) - total_space
            if deleted_revs:
                raise DependencyCycleDetected(sorted(deleted_revs))

        map_[None] = map_[()] = None
        self.heads = tuple(rev.revision for rev in heads)
        self._real_heads = tuple(rev.revision for rev in _real_heads)
        self.bases = tuple(rev.revision for rev in bases)
        self._real_bases = tuple(rev.revision for rev in _real_bases)

        for revision in has_branch_labels:
            self._add_branches(revision, map_, map_branch_labels=False)
        return map_
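
A toy illustration (plain dicts and hypothetical revision ids, not Alembic objects) of the first check above: when every revision is referenced as someone's down-revision, no head or base can exist, and the whole map is reported as a cycle.

down = {"a": ("c",), "b": ("a",), "c": ("b",)}   # a <- b <- c <- a: a loop

referenced = {d for downs in down.values() for d in downs}
heads = [rev for rev in down if rev not in referenced]
bases = [rev for rev, downs in down.items() if not downs]

print(heads, bases)  # [] [] -- no tips and no roots, so CycleDetected would be raised
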
Example #10
    def visit_select(self, select, asfrom=False, parens=True, **kwargs):

        stack_entry = {'select':select}
        
        if asfrom:
            stack_entry['is_selected_from'] = stack_entry['is_subquery'] = True
        elif self.stack and self.stack[-1].get('select'):
            stack_entry['is_subquery'] = True

        if self.stack and self.stack[-1].get('from'):
            existingfroms = self.stack[-1]['from']
        else:
            existingfroms = None
        froms = select._get_display_froms(existingfroms)

        correlate_froms = util.Set()
        for f in froms:
            correlate_froms.add(f)
            for f2 in f._get_from_objects():
                correlate_froms.add(f2)

        # TODO: might want to propagate existing froms for select(select(select))
        # where innermost select should correlate to outermost
#        if existingfroms:
#            correlate_froms = correlate_froms.union(existingfroms)    
        stack_entry['from'] = correlate_froms
        self.stack.append(stack_entry)

        # the actual list of columns to print in the SELECT column list.
        inner_columns = util.OrderedSet()
                
        for co in select.inner_columns:
            if select.use_labels:
                labelname = co._label
                if labelname is not None:
                    l = co.label(labelname)
                    inner_columns.add(self.process(l))
                else:
                    inner_columns.add(self.process(co))
            else:
                l = self.label_select_column(select, co)
                if l is not None:
                    inner_columns.add(self.process(l))
                else:
                    inner_columns.add(self.process(co))
            
        collist = string.join(inner_columns.difference(util.Set([None])), ', ')

        text = " ".join(["SELECT"] + [self.process(x) for x in select._prefixes]) + " "
        text += self.get_select_precolumns(select)
        text += collist

        whereclause = select._whereclause

        from_strings = []
        for f in froms:
            from_strings.append(self.process(f, asfrom=True))

            w = self.get_whereclause(f)
            if w is not None:
                if whereclause is not None:
                    whereclause = sql.and_(w, whereclause)
                else:
                    whereclause = w

        if froms:
            text += " \nFROM "
            text += string.join(from_strings, ', ')
        else:
            text += self.default_from()

        if whereclause is not None:
            t = self.process(whereclause)
            if t:
                text += " \nWHERE " + t

        group_by = self.process(select._group_by_clause)
        if group_by:
            text += " GROUP BY " + group_by

        if select._having is not None:
            t = self.process(select._having)
            if t:
                text += " \nHAVING " + t
        
        text += self.order_by_clause(select)
        text += (select._limit or select._offset) and self.limit_clause(select) or ""
        text += self.for_update_clause(select)

        self.stack.pop(-1)

        if asfrom and parens:
            return "(" + text + ")"
        else:
            return text
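
A standalone illustration (plain strings, not a real compiler run) of the inner_columns handling above: the OrderedSet keeps the rendered column strings in SELECT-list order, and difference() against a set containing None drops any entries that rendered to nothing.

from sqlalchemy import util

inner_columns = util.OrderedSet(["users.id", None, "users.name"])
collist = ", ".join(inner_columns.difference([None]))
print(collist)  # users.id, users.name
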
Example #11
    def visit_select(self,
                     select,
                     asfrom=False,
                     parens=True,
                     iswrapper=False,
                     **kwargs):

        stack_entry = {'select': select}
        prev_entry = self.stack and self.stack[-1] or None

        if asfrom or (prev_entry and 'select' in prev_entry):
            stack_entry['is_subquery'] = True
            if prev_entry and 'iswrapper' in prev_entry:
                column_clause_args = {'result_map': self.result_map}
            else:
                column_clause_args = {}
        elif iswrapper:
            column_clause_args = {}
            stack_entry['iswrapper'] = True
        else:
            column_clause_args = {'result_map': self.result_map}

        if self.stack and 'from' in self.stack[-1]:
            existingfroms = self.stack[-1]['from']
        else:
            existingfroms = None

        froms = select._get_display_froms(existingfroms)

        correlate_froms = util.Set(
            itertools.chain(*([froms] + [f._get_from_objects()
                                         for f in froms])))

        # TODO: might want to propagate existing froms for select(select(select))
        # where innermost select should correlate to outermost
        #        if existingfroms:
        #            correlate_froms = correlate_froms.union(existingfroms)
        stack_entry['from'] = correlate_froms
        self.stack.append(stack_entry)

        # the actual list of columns to print in the SELECT column list.
        inner_columns = util.OrderedSet([
            c for c in [
                self.process(
                    self.label_select_column(select, co, asfrom=asfrom), **
                    column_clause_args) for co in select.inner_columns
            ] if c is not None
        ])

        text = " ".join(["SELECT"] +
                        [self.process(x) for x in select._prefixes]) + " "
        text += self.get_select_precolumns(select)
        text += ', '.join(inner_columns)

        from_strings = []
        for f in froms:
            from_strings.append(self.process(f, asfrom=True))

        if froms:
            text += " \nFROM "
            text += string.join(from_strings, ', ')
        else:
            text += self.default_from()

        if select._whereclause is not None:
            t = self.process(select._whereclause)
            if t:
                text += " \nWHERE " + t

        group_by = self.process(select._group_by_clause)
        if group_by:
            text += " GROUP BY " + group_by

        if select._having is not None:
            t = self.process(select._having)
            if t:
                text += " \nHAVING " + t

        text += self.order_by_clause(select)
        text += (select._limit
                 or select._offset) and self.limit_clause(select) or ""
        text += self.for_update_clause(select)

        self.stack.pop(-1)

        if asfrom and parens:
            return "(" + text + ")"
        else:
            return text
Example #12
    def _revision_map(self):
        """memoized attribute, initializes the revision map from the
        initial collection.

        """
        map_ = {}

        heads = sqlautil.OrderedSet()
        _real_heads = sqlautil.OrderedSet()
        bases = ()
        _real_bases = ()

        has_branch_labels = set()
        all_revisions = set()

        for revision in self._generator():
            all_revisions.add(revision)

            if revision.revision in map_:
                util.warn("Revision %s is present more than once" %
                          revision.revision)
            map_[revision.revision] = revision
            if revision.branch_labels:
                has_branch_labels.add(revision)

            heads.add(revision)
            _real_heads.add(revision)
            if revision.is_base:
                bases += (revision, )
            if revision._is_real_base:
                _real_bases += (revision, )

        # add the branch_labels to the map_.  We'll need these
        # to resolve the dependencies.
        rev_map = map_.copy()
        self._map_branch_labels(has_branch_labels, map_)

        # resolve dependency names from branch labels and symbolic
        # names
        self._add_depends_on(all_revisions, map_)

        for rev in map_.values():
            for downrev in rev._all_down_revisions:
                if downrev not in map_:
                    util.warn("Revision %s referenced from %s is not present" %
                              (downrev, rev))
                down_revision = map_[downrev]
                down_revision.add_nextrev(rev)
                if downrev in rev._versioned_down_revisions:
                    heads.discard(down_revision)
                _real_heads.discard(down_revision)

        # once the map has downrevisions populated, the dependencies
        # can be further refined to include only those which are not
        # already ancestors
        self._normalize_depends_on(all_revisions, map_)
        self._detect_cycles(rev_map, heads, bases, _real_heads, _real_bases)

        map_[None] = map_[()] = None
        self.heads = tuple(rev.revision for rev in heads)
        self._real_heads = tuple(rev.revision for rev in _real_heads)
        self.bases = tuple(rev.revision for rev in bases)
        self._real_bases = tuple(rev.revision for rev in _real_bases)

        self._add_branches(has_branch_labels, map_)
        return map_
Example #13
    def __determine_remote_side(self):
        if self._arg_local_remote_pairs:
            if self.remote_side:
                raise exceptions.ArgumentError(
                    "remote_side argument is redundant against more detailed _local_remote_side argument."
                )
            if self.direction is MANYTOONE:
                eq_pairs = [(r, l) for l, r in self._arg_local_remote_pairs]
            else:
                eq_pairs = self._arg_local_remote_pairs
        elif self.remote_side:
            remote_side = util.Set([
                expression._literal_as_column(x)
                for x in util.to_set(self.remote_side)
            ])

            if self.direction is MANYTOONE:
                eq_pairs = criterion_as_pairs(
                    self.primaryjoin,
                    consider_as_referenced_keys=remote_side,
                    any_operator=True)
            else:
                eq_pairs = criterion_as_pairs(
                    self.primaryjoin,
                    consider_as_foreign_keys=remote_side,
                    any_operator=True)
        else:
            if self.viewonly:
                eq_pairs = self.synchronize_pairs
            else:
                eq_pairs = criterion_as_pairs(
                    self.primaryjoin,
                    consider_as_foreign_keys=self.foreign_keys,
                    any_operator=True)
                if self.secondaryjoin:
                    sq_pairs = criterion_as_pairs(
                        self.secondaryjoin,
                        consider_as_foreign_keys=self.foreign_keys,
                        any_operator=True)
                    eq_pairs += sq_pairs
                eq_pairs = [(l, r) for l, r in eq_pairs
                            if self.__col_is_part_of_mappings(l)
                            and self.__col_is_part_of_mappings(r)]

        if self.direction is MANYTOONE:
            self.remote_side, self.local_side = [
                util.OrderedSet(s) for s in zip(*eq_pairs)
            ]
            self.local_remote_pairs = [(r, l) for l, r in eq_pairs]
        else:
            self.local_side, self.remote_side = [
                util.OrderedSet(s) for s in zip(*eq_pairs)
            ]
            self.local_remote_pairs = eq_pairs

        if self.direction is ONETOMANY:
            for l in self.local_side:
                if not self.__col_is_part_of_mappings(l):
                    raise exceptions.ArgumentError(
                        "Local column '%s' is not part of mapping %s.  Specify remote_side argument to indicate which column lazy join condition should compare against."
                        % (l, self.parent))
        elif self.direction is MANYTOONE:
            for r in self.remote_side:
                if not self.__col_is_part_of_mappings(r):
                    raise exceptions.ArgumentError(
                        "Remote column '%s' is not part of mapping %s.  Specify remote_side argument to indicate which column lazy join condition should bind."
                        % (r, self.mapper))
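
A stand-in sketch (strings instead of Column objects) of the final zip(*eq_pairs) step above: the pairs are split into two parallel OrderedSets, and for MANYTOONE the unpacking order is flipped so that the left elements of the pairs end up as the remote side.

from sqlalchemy import util

eq_pairs = [("parent.id", "child.parent_id")]    # hypothetical (l, r) pairs

local_side, remote_side = [util.OrderedSet(s) for s in zip(*eq_pairs)]
print(list(local_side), list(remote_side))   # ['parent.id'] ['child.parent_id']

# MANYTOONE flips the unpacking, so the left elements become the remote side
remote_side, local_side = [util.OrderedSet(s) for s in zip(*eq_pairs)]
print(list(local_side), list(remote_side))   # ['child.parent_id'] ['parent.id']
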
Example #14
    def __determine_fks(self):

        if self._legacy_foreignkey and not self._refers_to_parent_table():
            self.foreign_keys = self._legacy_foreignkey

        arg_foreign_keys = util.Set([
            expression._literal_as_column(x)
            for x in util.to_set(self.foreign_keys)
        ])

        if self._arg_local_remote_pairs:
            if not arg_foreign_keys:
                raise exceptions.ArgumentError(
                    "foreign_keys argument is required with _local_remote_pairs argument"
                )
            self.foreign_keys = util.OrderedSet(arg_foreign_keys)
            self._opposite_side = util.OrderedSet()
            for l, r in self._arg_local_remote_pairs:
                if r in self.foreign_keys:
                    self._opposite_side.add(l)
                elif l in self.foreign_keys:
                    self._opposite_side.add(r)
            self.synchronize_pairs = zip(self._opposite_side,
                                         self.foreign_keys)
        else:
            eq_pairs = criterion_as_pairs(
                self.primaryjoin,
                consider_as_foreign_keys=arg_foreign_keys,
                any_operator=self.viewonly)
            eq_pairs = [
                (l, r) for l, r in eq_pairs
                if (self.__col_is_part_of_mappings(l) and
                    self.__col_is_part_of_mappings(r)) or r in arg_foreign_keys
            ]

            if not eq_pairs:
                if not self.viewonly and criterion_as_pairs(
                        self.primaryjoin,
                        consider_as_foreign_keys=arg_foreign_keys,
                        any_operator=True):
                    raise exceptions.ArgumentError(
                        "Could not locate any equated, locally mapped column pairs for primaryjoin condition '%s' on relation %s. "
                        "For more relaxed rules on join conditions, the relation may be marked as viewonly=True."
                        % (self.primaryjoin, self))
                else:
                    if arg_foreign_keys:
                        raise exceptions.ArgumentError(
                            "Could not determine relation direction for primaryjoin condition '%s', on relation %s. "
                            "Specify _local_remote_pairs=[(local, remote), (local, remote), ...] to explicitly establish the local/remote column pairs."
                            % (self.primaryjoin, self))
                    else:
                        raise exceptions.ArgumentError(
                            "Could not determine relation direction for primaryjoin condition '%s', on relation %s. "
                            "Specify the foreign_keys argument to indicate which columns on the relation are foreign."
                            % (self.primaryjoin, self))

            self.foreign_keys = util.OrderedSet([r for l, r in eq_pairs])
            self._opposite_side = util.OrderedSet([l for l, r in eq_pairs])
            self.synchronize_pairs = eq_pairs

        if self.secondaryjoin:
            sq_pairs = criterion_as_pairs(
                self.secondaryjoin,
                consider_as_foreign_keys=arg_foreign_keys,
                any_operator=self.viewonly)
            sq_pairs = [
                (l, r) for l, r in sq_pairs
                if (self.__col_is_part_of_mappings(l) and
                    self.__col_is_part_of_mappings(r)) or r in arg_foreign_keys
            ]

            if not sq_pairs:
                if not self.viewonly and criterion_as_pairs(
                        self.secondaryjoin,
                        consider_as_foreign_keys=arg_foreign_keys,
                        any_operator=True):
                    raise exceptions.ArgumentError(
                        "Could not locate any equated, locally mapped column pairs for secondaryjoin condition '%s' on relation %s. "
                        "For more relaxed rules on join conditions, the relation may be marked as viewonly=True."
                        % (self.secondaryjoin, self))
                else:
                    raise exceptions.ArgumentError(
                        "Could not determine relation direction for secondaryjoin condition '%s', on relation %s. "
                        "Specify the foreign_keys argument to indicate which columns on the relation are foreign."
                        % (self.secondaryjoin, self))

            self.foreign_keys.update([r for l, r in sq_pairs])
            self._opposite_side.update([l for l, r in sq_pairs])
            self.secondary_synchronize_pairs = sq_pairs
        else:
            self.secondary_synchronize_pairs = None
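
Finally, a standalone illustration (hypothetical string stand-ins) of the OrderedSet.update() calls used for the secondaryjoin columns above: members already present keep their position, and new members are appended in the order they arrive.

from sqlalchemy import util

foreign_keys = util.OrderedSet(["child.parent_id"])
foreign_keys.update(["assoc.left_id", "child.parent_id", "assoc.right_id"])
print(list(foreign_keys))  # ['child.parent_id', 'assoc.left_id', 'assoc.right_id']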