Example #1
def __init__(self):
    from test.bootstrap.config import options
    self.write = options.write_profiles
    dirname, fname = os.path.split(__file__)
    self.short_fname = "profiles.txt"
    self.fname = os.path.join(dirname, self.short_fname)
    self.data = util.defaultdict(lambda: util.defaultdict(dict))
    self._read()
    if self.write:
        # rewrite for the case where features changed,
        # etc.
        self._write()
Example #3
    def get_indexes(self, connection, table_name, schema=None, **kw):
        qry = """
        SELECT ix.rdb$index_name AS index_name,
               ix.rdb$unique_flag AS unique_flag,
               ic.rdb$field_name AS field_name
        FROM rdb$indices ix
             JOIN rdb$index_segments ic
                  ON ix.rdb$index_name=ic.rdb$index_name
             LEFT OUTER JOIN rdb$relation_constraints
                  ON rdb$relation_constraints.rdb$index_name =
                        ic.rdb$index_name
        WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL
          AND rdb$relation_constraints.rdb$constraint_type IS NULL
        ORDER BY index_name, field_name
        """
        c = connection.execute(qry, [self.denormalize_name(table_name)])

        indexes = util.defaultdict(dict)
        for row in c:
            indexrec = indexes[row['index_name']]
            if 'name' not in indexrec:
                indexrec['name'] = self.normalize_name(row['index_name'])
                indexrec['column_names'] = []
                indexrec['unique'] = bool(row['unique_flag'])

            indexrec['column_names'].append(
                                self.normalize_name(row['field_name']))

        return indexes.values()
Example #4
def _collect_delete_commands(base_mapper, uowtransaction, table, 
                                states_to_delete):
    """Identify values to use in DELETE statements for a list of 
    states to be deleted."""

    delete = util.defaultdict(list)

    for state, state_dict, mapper, has_identity, connection \
                                        in states_to_delete:
        if not has_identity or table not in mapper._pks_by_table:
            continue

        params = {}
        delete[connection].append(params)
        for col in mapper._pks_by_table[table]:
            params[col.key] = \
                    value = \
                    mapper._get_state_attr_by_column(
                                    state, state_dict, col)
            if value is None:
                raise sa_exc.FlushError(
                            "Can't delete from table "
                            "using NULL for primary "
                            "key value")

        if mapper.version_id_col is not None and \
                    table.c.contains_column(mapper.version_id_col):
            params[mapper.version_id_col.key] = \
                        mapper._get_committed_state_attr_by_column(
                                state, state_dict,
                                mapper.version_id_col)
    return delete
Example #5
def find_cycles(tuples, allitems):
    # straight from gvr with some mods
    todo = set(allitems)

    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[parent].add(child)

    output = set()

    while todo:
        node = todo.pop()
        stack = [node]
        while stack:
            top = stack[-1]
            for node in edges[top]:
                if node in stack:
                    cyc = stack[stack.index(node):]
                    todo.difference_update(cyc)
                    output.update(cyc)

                if node in todo:
                    stack.append(node)
                    todo.remove(node)
                    break
            else:
                node = stack.pop()
    return output
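
A minimal usage sketch for find_cycles above, with made-up nodes; it assumes util.defaultdict is collections.defaultdict, as it is in SQLAlchemy's util package:

# Nodes 1, 2 and 3 form a cycle (1 -> 2 -> 3 -> 1); node 4 hangs off it.
tuples = [(1, 2), (2, 3), (3, 1), (3, 4)]
allitems = {1, 2, 3, 4}

print(find_cycles(tuples, allitems))   # expected: {1, 2, 3}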
Example #7
    def get_indexes(self, connection, table_name, schema=None, **kw):
        qry = """
        SELECT ix.rdb$index_name AS index_name,
               ix.rdb$unique_flag AS unique_flag,
               ic.rdb$field_name AS field_name
        FROM rdb$indices ix
             JOIN rdb$index_segments ic
                  ON ix.rdb$index_name=ic.rdb$index_name
             LEFT OUTER JOIN rdb$relation_constraints
                  ON rdb$relation_constraints.rdb$index_name =
                        ic.rdb$index_name
        WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL
          AND rdb$relation_constraints.rdb$constraint_type IS NULL
        ORDER BY index_name, ic.rdb$field_position
        """
        c = connection.execute(qry, [self.denormalize_name(table_name)])

        indexes = util.defaultdict(dict)
        for row in c:
            indexrec = indexes[row["index_name"]]
            if "name" not in indexrec:
                indexrec["name"] = self.normalize_name(row["index_name"])
                indexrec["column_names"] = []
                indexrec["unique"] = bool(row["unique_flag"])

            indexrec["column_names"].append(
                self.normalize_name(row["field_name"]))

        return list(indexes.values())
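
The defaultdict(dict) grouping used above can be shown without a Firebird connection; a sketch with fabricated rows, with collections.defaultdict standing in for util.defaultdict and lower() standing in for normalize_name:

from collections import defaultdict

rows = [
    {"index_name": "IX_A", "unique_flag": 1, "field_name": "COL1"},
    {"index_name": "IX_A", "unique_flag": 1, "field_name": "COL2"},
]

indexes = defaultdict(dict)
for row in rows:
    indexrec = indexes[row["index_name"]]    # empty dict created on first access
    if "name" not in indexrec:
        indexrec["name"] = row["index_name"].lower()
        indexrec["column_names"] = []
        indexrec["unique"] = bool(row["unique_flag"])
    indexrec["column_names"].append(row["field_name"].lower())

print(list(indexes.values()))
# [{'name': 'ix_a', 'column_names': ['col1', 'col2'], 'unique': True}]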
Example #8
    def get_foreign_keys(self, connection, tablename, schema=None, **kw):
        current_schema = schema or self.default_schema_name
        # Add constraints
        RR = ischema.ref_constraints  #information_schema.referential_constraints
        TC = ischema.constraints  #information_schema.table_constraints
        C = ischema.key_constraints.alias(
            'C')  # information_schema.constraint_column_usage:
        # the constrained column
        R = ischema.key_constraints.alias(
            'R')  # information_schema.constraint_column_usage:
        # the referenced column

        # Foreign key constraints
        s = sql.select([
            C.c.column_name, R.c.table_schema, R.c.table_name, R.c.column_name,
            RR.c.constraint_name, RR.c.match_option, RR.c.update_rule,
            RR.c.delete_rule
        ],
                       sql.and_(
                           C.c.table_name == tablename,
                           C.c.table_schema == current_schema,
                           C.c.constraint_name == RR.c.constraint_name,
                           R.c.constraint_name == RR.c.unique_constraint_name,
                           C.c.ordinal_position == R.c.ordinal_position),
                       order_by=[RR.c.constraint_name, R.c.ordinal_position])

        # group rows by constraint ID, to handle multi-column FKs
        fkeys = []
        fknm, scols, rcols = (None, [], [])

        def fkey_rec():
            return {
                'name': None,
                'constrained_columns': [],
                'referred_schema': None,
                'referred_table': None,
                'referred_columns': []
            }

        fkeys = util.defaultdict(fkey_rec)

        for r in connection.execute(s).fetchall():
            scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r

            rec = fkeys[rfknm]
            rec['name'] = rfknm
            if not rec['referred_table']:
                rec['referred_table'] = rtbl

                if schema is not None or current_schema != rschema:
                    rec['referred_schema'] = rschema

            local_cols, remote_cols = rec['constrained_columns'], rec[
                'referred_columns']

            local_cols.append(scol)
            remote_cols.append(rcol)

        return fkeys.values()
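
The defaultdict(fkey_rec) idiom is what folds the rows of a multi-column foreign key into one record; a standalone sketch with made-up rows, collections.defaultdict assumed in place of util.defaultdict:

from collections import defaultdict

def fkey_rec():
    return {
        'name': None,
        'constrained_columns': [],
        'referred_schema': None,
        'referred_table': None,
        'referred_columns': []
    }

fkeys = defaultdict(fkey_rec)

# Two rows describing one two-column foreign key (hypothetical data).
rows = [
    ("fk_order_customer", "customer_id", "customer", "id"),
    ("fk_order_customer", "customer_region", "customer", "region"),
]
for fknm, scol, rtbl, rcol in rows:
    rec = fkeys[fknm]                 # same record for every row of the constraint
    rec['name'] = fknm
    if not rec['referred_table']:
        rec['referred_table'] = rtbl
    rec['constrained_columns'].append(scol)
    rec['referred_columns'].append(rcol)

print(list(fkeys.values()))
# one record: constrained_columns ['customer_id', 'customer_region'],
#             referred_columns ['id', 'region']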
Example #9
    def __init__(self, session):
        self.session = session
        self.mapper_flush_opts = session._mapper_flush_opts

        # dictionary used by external actors to
        # store arbitrary state information.
        self.attributes = {}

        # dictionary of mappers to sets of
        # DependencyProcessors, which are also
        # set to be part of the sorted flush actions,
        # which have that mapper as a parent.
        self.deps = util.defaultdict(set)

        # dictionary of mappers to sets of InstanceState
        # items pending for flush which have that mapper
        # as a parent.
        self.mappers = util.defaultdict(set)

        # a dictionary of Preprocess objects, which gather
        # additional states impacted by the flush
        # and determine if a flush action is needed
        self.presort_actions = {}

        # dictionary of PostSortRec objects, each
        # one issues work during the flush within
        # a certain ordering.
        self.postsort_actions = {}

        # a set of 2-tuples, each containing two
        # PostSortRec objects where the second
        # is dependent on the first being executed
        # first
        self.dependencies = set()

        # dictionary of InstanceState-> (isdelete, listonly)
        # tuples, indicating if this state is to be deleted
        # or insert/updated, or just refreshed
        self.states = {}

        # tracks InstanceStates which will be receiving
        # a "post update" call.  Keys are mappers,
        # values are a set of states and a set of the
        # columns which should be included in the update.
        self.post_update_states = util.defaultdict(lambda: (set(), set()))
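
A small sketch of the post_update_states pattern, assuming util.defaultdict is collections.defaultdict; the mapper key and values are hypothetical:

from collections import defaultdict

post_update_states = defaultdict(lambda: (set(), set()))

states, cols = post_update_states["some_mapper"]   # pair created on first access
states.add("state_1")
cols.add("col_x")

print(post_update_states["some_mapper"])           # ({'state_1'}, {'col_x'})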
Example #11
    def get_foreign_keys(self, connection, tablename, schema=None, **kw):
        current_schema = schema or self.default_schema_name
        # Add constraints
        RR = ischema.ref_constraints    #information_schema.referential_constraints
        TC = ischema.constraints        #information_schema.table_constraints
        C  = ischema.key_constraints.alias('C') # information_schema.constraint_column_usage: 
                                                # the constrained column
        R  = ischema.key_constraints.alias('R') # information_schema.constraint_column_usage: 
                                                # the referenced column

        # Foreign key constraints
        s = sql.select([C.c.column_name,
                        R.c.table_schema, R.c.table_name, R.c.column_name,
                        RR.c.constraint_name, RR.c.match_option, RR.c.update_rule,
                        RR.c.delete_rule],
                       sql.and_(C.c.table_name == tablename,
                                C.c.table_schema == current_schema,
                                C.c.constraint_name == RR.c.constraint_name,
                                R.c.constraint_name == RR.c.unique_constraint_name,
                                C.c.ordinal_position == R.c.ordinal_position
                                ),
                       order_by = [RR.c.constraint_name, R.c.ordinal_position])
        

        # group rows by constraint ID, to handle multi-column FKs
        fkeys = []
        fknm, scols, rcols = (None, [], [])
        
        def fkey_rec():
            return {
                'name' : None,
                'constrained_columns' : [],
                'referred_schema' : None,
                'referred_table' : None,
                'referred_columns' : []
            }

        fkeys = util.defaultdict(fkey_rec)
        
        for r in connection.execute(s).fetchall():
            scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r

            rec = fkeys[rfknm]
            rec['name'] = rfknm
            if not rec['referred_table']:
                rec['referred_table'] = rtbl

                if schema is not None or current_schema != rschema:
                    rec['referred_schema'] = rschema
            
            local_cols, remote_cols = rec['constrained_columns'], rec['referred_columns']
            
            local_cols.append(scol)
            remote_cols.append(rcol)

        return fkeys.values()
Example #12
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        schema_sel = schema or self.default_schema_name
        c = connection.execute(
            """SELECT t1.constrname AS cons_name,
                 t4.colname AS local_column, t7.tabname AS remote_table,
                 t6.colname AS remote_column, t7.owner AS remote_owner,
                 t5.delrule
            FROM "informix".sysconstraints AS t1 , "informix".systables AS t2 ,
                 "informix".sysindexes AS t3 , "informix".syscolumns AS t4 ,
                 "informix".sysreferences AS t5 , "informix".syscolumns AS t6 , "informix".systables AS t7 ,
                 "informix".sysconstraints AS t8 , "informix".sysindexes AS t9
           WHERE t1.tabid = t2.tabid AND t2.tabname=? AND t2.owner=? AND t1.constrtype = 'R'
             AND t3.tabid = t2.tabid AND t3.idxname = t1.idxname
             AND t4.tabid = t2.tabid AND t4.colno in (t3.part1, t3.part2, t3.part3,
             t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
             t3.part11, t3.part12, t3.part13, t3.part14, t3.part15, t3.part16)
             AND t5.constrid = t1.constrid AND t8.constrid = t5.primary
             AND t6.tabid = t5.ptabid AND t6.colno in (t9.part1, t9.part2, t9.part3,
             t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
             t9.part11, t9.part12, t9.part13, t9.part14, t9.part15, t9.part16) AND t9.idxname =
             t8.idxname
             AND t7.tabid = t5.ptabid""", table_name, schema_sel)

        def fkey_rec():
            return {
                'name': None,
                'constrained_columns': [],
                'referred_schema': None,
                'referred_table': None,
                'referred_columns': [],
                'options': {}
            }

        fkeys = util.defaultdict(fkey_rec)

        rows = c.fetchall()
        for cons_name, local_column, remote_table, remote_column, remote_owner, delrule in rows:

            rec = fkeys[cons_name]
            rec['name'] = cons_name
            local_cols, remote_cols = rec['constrained_columns'], rec[
                'referred_columns']

            if not rec['referred_table']:
                rec['referred_table'] = remote_table
                if schema is not None:
                    rec['referred_schema'] = remote_owner

            if local_column not in local_cols:
                local_cols.append(local_column)
            if remote_column not in remote_cols:
                remote_cols.append(remote_column)
            if delrule == 'C':
                rec['options']['ondelete'] = 'CASCADE'

        return list(fkeys.values())
Example #13
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        schema_sel = schema or self.default_schema_name
        c = connection.execute(
        """select t1.constrname as cons_name,
                 t4.colname as local_column, t7.tabname as remote_table,
                 t6.colname as remote_column, t7.owner as remote_owner 
            from sysconstraints as t1 , systables as t2 ,
                 sysindexes as t3 , syscolumns as t4 ,
                 sysreferences as t5 , syscolumns as t6 , systables as t7 ,
                 sysconstraints as t8 , sysindexes as t9
           where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? and t1.constrtype = 'R'
             and t3.tabid = t2.tabid and t3.idxname = t1.idxname
             and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
             t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
             t3.part11, t3.part12, t3.part13, t3.part14, t3.part15, t3.part16)
             and t5.constrid = t1.constrid and t8.constrid = t5.primary
             and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
             t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
             t9.part11, t9.part12, t9.part13, t9.part14, t9.part15, t9.part16) and t9.idxname =
             t8.idxname
             and t7.tabid = t5.ptabid""", table_name, schema_sel)


        def fkey_rec():
            return {
                 'name' : None,
                 'constrained_columns' : [],
                 'referred_schema' : None,
                 'referred_table' : None,
                 'referred_columns' : []
             }

        fkeys = util.defaultdict(fkey_rec)

        rows = c.fetchall()
        for cons_name, local_column, \
                    remote_table, remote_column, remote_owner in rows:

            rec = fkeys[cons_name]
            rec['name'] = cons_name
            local_cols, remote_cols = \
                        rec['constrained_columns'], rec['referred_columns']

            if not rec['referred_table']:
                rec['referred_table'] = remote_table
                if schema is not None:
                    rec['referred_schema'] = remote_owner

            if local_column not in local_cols:
                local_cols.append(local_column)
            if remote_column not in remote_cols:
                remote_cols.append(remote_column)

        return fkeys.values()
Example #15
def conforms_partial_ordering(tuples, sorted_elements):
    """True if the given sorting conforms to the given partial ordering."""

    deps = defaultdict(set)
    for parent, child in tuples:
        deps[parent].add(child)
    for i, node in enumerate(sorted_elements):
        for n in sorted_elements[i:]:
            if node in deps[n]:
                return False
    else:
        return True
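
A quick hypothetical check of conforms_partial_ordering above (defaultdict here is collections.defaultdict); each tuple means the parent must appear before the child:

tuples = [(1, 2), (2, 3)]   # 1 before 2, 2 before 3

print(conforms_partial_ordering(tuples, [1, 2, 3]))   # True
print(conforms_partial_ordering(tuples, [3, 1, 2]))   # False: 3 precedes its parent 2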
Example #17
def assert_sort(self, tuples, allitems=None):
    if allitems is None:
        allitems = self._nodes_from_tuples(tuples)
    else:
        allitems = self._nodes_from_tuples(tuples).union(allitems)
    result = list(topological.sort(tuples, allitems))
    deps = util.defaultdict(set)
    for parent, child in tuples:
        deps[parent].add(child)
    assert len(result)
    for i, node in enumerate(result):
        for n in result[i:]:
            assert node not in deps[n]
Example #19
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        if table_name is None:
            return []
        schema_int = self._get_schema_for_input_or_current(connection, schema)

        def fkey_rec():
            return {
                'name': None,
                'constrained_columns': [],
                'referred_schema': None,
                'referred_table': None,
                'referred_columns': []
            }

        fkeys = util.defaultdict(fkey_rec)
        odbc_connection = self.getODBCConnection(connection)
        if odbc_connection is not None and not self.use_sql_fallback(**kw):
            constraints = self._get_foreign_keys_odbc(connection,
                                                      odbc_connection,
                                                      table_name=table_name,
                                                      schema=schema_int,
                                                      **kw)
        else:
            constraints = self._get_foreign_keys_sql(connection,
                                                     table_name=table_name,
                                                     schema=schema_int,
                                                     **kw)
        table_name = self.denormalize_name(table_name)
        for row in constraints:
            (cons_name, local_column, remote_schema, remote_table, remote_column) = \
                (row[0], row[1], row[2], row[3], row[4])
            rec = fkeys[self.normalize_name(cons_name)]
            rec['name'] = self.normalize_name(cons_name)
            local_cols, remote_cols = rec['constrained_columns'], rec[
                'referred_columns']

            if not rec['referred_table']:
                rec['referred_table'] = self.normalize_name(remote_table)
                # we need to take care of calls without schema. the sqla test suite
                # expects referred_schema to be None if None is passed in to this function
                if schema is None and self.normalize_name(
                        schema_int) == self.normalize_name(remote_schema):
                    rec['referred_schema'] = None
                else:
                    rec['referred_schema'] = self.normalize_name(remote_schema)

            local_cols.append(self.normalize_name(local_column))
            remote_cols.append(self.normalize_name(remote_column))

        result = list(fkeys.values())
        return result
Example #20
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        schema_int = schema or connection.engine.url.database
        sql_stmnt = "SELECT constraint_name, column_name, referenced_schema, referenced_table, " \
                    "referenced_column FROM SYS.EXA_ALL_CONSTRAINT_COLUMNS " \
                    "WHERE constraint_type = 'FOREIGN KEY' AND constraint_table = :table_name " \
                    "AND constraint_schema = "
        if schema_int is None:
            sql_stmnt += "CURRENT_SCHEMA "
        else:
            sql_stmnt += ":schema "
        sql_stmnt += "ORDER BY ordinal_position"
        rp = connection.execute(sql.text(sql_stmnt),
                    table_name=self.denormalize_name(table_name),
                    schema=self.denormalize_name(schema_int))
        constraint_data = rp.fetchall()

        def fkey_rec():
            return {
                'name': None,
                'constrained_columns': [],
                'referred_schema': None,
                'referred_table': None,
                'referred_columns': []
            }

        fkeys = util.defaultdict(fkey_rec)

        for row in constraint_data:
            (cons_name, local_column, remote_schema, remote_table, remote_column) = \
                    (row[0], row[1], row[2], row[3], row[4])
            rec = fkeys[self.normalize_name(cons_name)]
            rec['name'] = self.normalize_name(cons_name)
            local_cols, remote_cols = rec['constrained_columns'], rec['referred_columns']

            if not rec['referred_table']:
                rec['referred_table'] = self.normalize_name(remote_table)
                # we need to take care of calls without schema. the sqla test suite
                # expects referred_schema to be None if None is passed in to this function
                if schema is None and schema_int == self.normalize_name(remote_schema):
                    rec['referred_schema'] = None
                else:
                    rec['referred_schema'] = self.normalize_name(remote_schema)

            local_cols.append(self.normalize_name(local_column))
            remote_cols.append(self.normalize_name(remote_column))

        return fkeys.values()
Example #21
def all_partial_orderings(tuples, elements):
    edges = defaultdict(set)
    for parent, child in tuples:
        edges[child].add(parent)

    def _all_orderings(elements):

        if len(elements) == 1:
            yield list(elements)
        else:
            for elem in elements:
                subset = set(elements).difference([elem])
                if not subset.intersection(edges[elem]):
                    for sub_ordering in _all_orderings(subset):
                        yield [elem] + sub_ordering

    return iter(_all_orderings(elements))
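
A hypothetical run of all_partial_orderings above (defaultdict taken from collections), where node 1 must precede nodes 2 and 3 while 2 and 3 are unordered relative to each other:

for ordering in all_partial_orderings([(1, 2), (1, 3)], {1, 2, 3}):
    print(ordering)   # [1, 2, 3] and [1, 3, 2], in some order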
Example #23
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        # FK
        c = connection.execute(
        """select t1.constrname as cons_name , t1.constrtype as cons_type ,
                 t4.colname as local_column , t7.tabname as remote_table ,
                 t6.colname as remote_column
            from sysconstraints as t1 , systables as t2 ,
                 sysindexes as t3 , syscolumns as t4 ,
                 sysreferences as t5 , syscolumns as t6 , systables as t7 ,
                 sysconstraints as t8 , sysindexes as t9
           where t1.tabid = t2.tabid and t2.tabname=? and t1.constrtype = 'R'
             and t3.tabid = t2.tabid and t3.idxname = t1.idxname
             and t4.tabid = t2.tabid and t4.colno = t3.part1
             and t5.constrid = t1.constrid and t8.constrid = t5.primary
             and t6.tabid = t5.ptabid and t6.colno = t9.part1 and t9.idxname =
             t8.idxname
             and t7.tabid = t5.ptabid""", table_name.lower())
        rows = c.fetchall()


        def fkey_rec():
            return {
                 'name' : None,
                 'constrained_columns' : [],
                 'referred_schema' : None,
                 'referred_table' : None,
                 'referred_columns' : []
             }

        fkeys = util.defaultdict(fkey_rec)

        for cons_name, cons_type, local_column, \
                    remote_table, remote_column in rows:

            rec = fkeys[cons_name]
            rec['name'] = cons_name
            local_cols, remote_cols = \
                        rec['constrained_columns'], rec['referred_columns']

            if not rec['referred_table']:
                rec['referred_table'] = remote_table

            local_cols.append(local_column)
            remote_cols.append(remote_column)

        return fkeys.values()
Example #25
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        schema_int = schema or connection.engine.url.database
        if schema_int is None:
            schema_int = connection.execute(
                "select CURRENT_SCHEMA from dual").scalar()
        table_name = self.denormalize_name(table_name)

        def fkey_rec():
            return {
                'name': None,
                'constrained_columns': [],
                'referred_schema': None,
                'referred_table': None,
                'referred_columns': []
            }

        fkeys = util.defaultdict(fkey_rec)

        for row in self._get_all_constraints(connection,
                                             schema=schema,
                                             info_cache=kw.get("info_cache")):
            if (row[5] != table_name
                    and table_name is not None) or row[6] != 'FOREIGN KEY':
                continue
            (cons_name, local_column, remote_schema, remote_table, remote_column) = \
                    (row[0], row[1], row[2], row[3], row[4])
            rec = fkeys[self.normalize_name(cons_name)]
            rec['name'] = self.normalize_name(cons_name)
            local_cols, remote_cols = rec['constrained_columns'], rec[
                'referred_columns']

            if not rec['referred_table']:
                rec['referred_table'] = self.normalize_name(remote_table)
                # we need to take care of calls without schema. the sqla test suite
                # expects referred_schema to be None if None is passed in to this function
                if schema is None and self.normalize_name(
                        schema_int) == self.normalize_name(remote_schema):
                    rec['referred_schema'] = None
                else:
                    rec['referred_schema'] = self.normalize_name(remote_schema)

            local_cols.append(self.normalize_name(local_column))
            remote_cols.append(self.normalize_name(remote_column))

        return list(fkeys.values())
Example #26
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        # Query to extract the details of each UK/FK of the given table
        fkqry = """
        SELECT rc.rdb$constraint_name AS cname,
               cse.rdb$field_name AS fname,
               ix2.rdb$relation_name AS targetrname,
               se.rdb$field_name AS targetfname
        FROM rdb$relation_constraints rc
             JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
             JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
             JOIN rdb$index_segments cse ON
                        cse.rdb$index_name=ix1.rdb$index_name
             JOIN rdb$index_segments se
                  ON se.rdb$index_name=ix2.rdb$index_name
                     AND se.rdb$field_position=cse.rdb$field_position
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        ORDER BY se.rdb$index_name, se.rdb$field_position
        """
        tablename = self.denormalize_name(table_name)

        c = connection.exec_driver_sql(fkqry, ["FOREIGN KEY", tablename])
        fks = util.defaultdict(
            lambda: {
                "name": None,
                "constrained_columns": [],
                "referred_schema": None,
                "referred_table": None,
                "referred_columns": [],
            }
        )

        for row in c:
            cname = self.normalize_name(row["cname"])
            fk = fks[cname]
            if not fk["name"]:
                fk["name"] = cname
                fk["referred_table"] = self.normalize_name(row["targetrname"])
            fk["constrained_columns"].append(self.normalize_name(row["fname"]))
            fk["referred_columns"].append(
                self.normalize_name(row["targetfname"])
            )
        return list(fks.values())
Example #27
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        # Query to extract the details of each UK/FK of the given table
        fkqry = """
        SELECT rc.rdb$constraint_name AS cname,
               cse.rdb$field_name AS fname,
               ix2.rdb$relation_name AS targetrname,
               se.rdb$field_name AS targetfname
        FROM rdb$relation_constraints rc
             JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
             JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
             JOIN rdb$index_segments cse ON
                        cse.rdb$index_name=ix1.rdb$index_name
             JOIN rdb$index_segments se
                  ON se.rdb$index_name=ix2.rdb$index_name
                     AND se.rdb$field_position=cse.rdb$field_position
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        ORDER BY se.rdb$index_name, se.rdb$field_position
        """
        tablename = self.denormalize_name(table_name)

        c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
        fks = util.defaultdict(
            lambda: {
                "name": None,
                "constrained_columns": [],
                "referred_schema": None,
                "referred_table": None,
                "referred_columns": [],
            }
        )

        for row in c:
            cname = self.normalize_name(row["cname"])
            fk = fks[cname]
            if not fk["name"]:
                fk["name"] = cname
                fk["referred_table"] = self.normalize_name(row["targetrname"])
            fk["constrained_columns"].append(self.normalize_name(row["fname"]))
            fk["referred_columns"].append(
                self.normalize_name(row["targetfname"])
            )
        return list(fks.values())
Example #28
def sort_as_subsets(tuples, allitems):

    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[child].add(parent)
    
    todo = set(allitems)

    while todo:
        output = set()
        for node in list(todo):
            if not todo.intersection(edges[node]):
                output.add(node)

        if not output:
            raise CircularDependencyError(
                    "Circular dependency detected: cycles: %r all edges: %s" % 
                    (find_cycles(tuples, allitems), _dump_edges(edges, True)))

        todo.difference_update(output)
        yield output
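
A hypothetical run of sort_as_subsets above (util.defaultdict assumed to be collections.defaultdict); each yielded set depends only on nodes from earlier sets:

deps = [("a", "b"), ("a", "c"), ("b", "d"), ("c", "d")]
for batch in sort_as_subsets(deps, ["a", "b", "c", "d"]):
    print(batch)   # {'a'}, then {'b', 'c'}, then {'d'}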
Example #29
def sort_as_subsets(tuples, allitems):

    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[child].add(parent)

    todo = set(allitems)

    while todo:
        output = set()
        for node in list(todo):
            if not todo.intersection(edges[node]):
                output.add(node)

        if not output:
            raise CircularDependencyError("Circular dependency detected",
                                          find_cycles(tuples, allitems),
                                          _gen_edges(edges))

        todo.difference_update(output)
        yield output
Example #30
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        schema_int = schema or connection.engine.url.database
        if schema_int is None:
            schema_int = connection.execute("select CURRENT_SCHEMA from dual").scalar()
        table_name = self.denormalize_name(table_name)

        def fkey_rec():
            return {
                'name': None,
                'constrained_columns': [],
                'referred_schema': None,
                'referred_table': None,
                'referred_columns': []
            }

        fkeys = util.defaultdict(fkey_rec)

        for row in self._get_all_constraints(connection, schema=schema, info_cache=kw.get("info_cache")):
            if (row[5] != table_name and table_name is not None) or row[6] != 'FOREIGN KEY':
                continue
            (cons_name, local_column, remote_schema, remote_table, remote_column) = \
                    (row[0], row[1], row[2], row[3], row[4])
            rec = fkeys[self.normalize_name(cons_name)]
            rec['name'] = self.normalize_name(cons_name)
            local_cols, remote_cols = rec['constrained_columns'], rec['referred_columns']

            if not rec['referred_table']:
                rec['referred_table'] = self.normalize_name(remote_table)
                # we need to take care of calls without schema. the sqla test suite
                # expects referred_schema to be None if None is passed in to this function
                if schema is None and self.normalize_name(schema_int) == self.normalize_name(remote_schema):
                    rec['referred_schema'] = None
                else:
                    rec['referred_schema'] = self.normalize_name(remote_schema)

            local_cols.append(self.normalize_name(local_column))
            remote_cols.append(self.normalize_name(remote_column))

        return list(fkeys.values())
Example #31
def find_cycles(tuples, allitems):
    # straight from gvr with some mods

    edges = util.defaultdict(set)
    for parent, child in tuples:
        edges[parent].add(child)
    nodes_to_test = set(edges)

    output = set()

    # we'd like to find all nodes that are
    # involved in cycles, so we do the full
    # pass through the whole thing for each
    # node in the original list.

    # we can go just through parent edge nodes.
    # if a node is only a child and never a parent,
    # by definition it can't be part of a cycle.  same
    # if it's not in the edges at all.
    for node in nodes_to_test:
        stack = [node]
        todo = nodes_to_test.difference(stack)
        while stack:
            top = stack[-1]
            for node in edges[top]:
                if node in stack:
                    cyc = stack[stack.index(node):]
                    todo.difference_update(cyc)
                    output.update(cyc)

                if node in todo:
                    stack.append(node)
                    todo.remove(node)
                    break
            else:
                node = stack.pop()
    return output
Example #33
def __init__(self):
    self.parent_to_children = util.defaultdict(set)
    self.child_to_parents = util.defaultdict(set)
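
A small sketch of the two-way edge index this __init__ sets up, with collections.defaultdict standing in for util.defaultdict and made-up node names:

from collections import defaultdict

parent_to_children = defaultdict(set)
child_to_parents = defaultdict(set)

# record the edge A -> B from both directions
parent_to_children["A"].add("B")
child_to_parents["B"].add("A")

print(parent_to_children["A"], child_to_parents["B"])   # {'B'} {'A'}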
Example #34
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        """

        kw arguments can be:

            oracle_resolve_synonyms

            dblink

        """

        requested_schema = schema  # to check later on
        resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
        dblink = kw.get('dblink', '')
        info_cache = kw.get('info_cache')

        (table_name, schema, dblink, synonym) = \
            self._prepare_reflection_args(connection, table_name, schema,
                                          resolve_synonyms, dblink,
                                          info_cache=info_cache)

        constraint_data = self._get_constraint_data(connection, table_name,
                                                schema, dblink,
                                                info_cache=kw.get('info_cache'))

        def fkey_rec():
            return {
                'name': None,
                'constrained_columns': [],
                'referred_schema': None,
                'referred_table': None,
                'referred_columns': []
            }

        fkeys = util.defaultdict(fkey_rec)

        for row in constraint_data:
            (cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
                    row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])

            if cons_type == 'R':
                if remote_table is None:
                    # ticket 363
                    util.warn(
                        ("Got 'None' querying 'table_name' from "
                         "all_cons_columns%(dblink)s - does the user have "
                         "proper rights to the table?") % {'dblink': dblink})
                    continue

                rec = fkeys[cons_name]
                rec['name'] = cons_name
                local_cols, remote_cols = rec['constrained_columns'], rec['referred_columns']

                if not rec['referred_table']:
                    if resolve_synonyms:
                        ref_remote_name, ref_remote_owner, ref_dblink, ref_synonym = \
                                self._resolve_synonym(
                                    connection,
                                    desired_owner=self.denormalize_name(remote_owner),
                                    desired_table=self.denormalize_name(remote_table)
                                )
                        if ref_synonym:
                            remote_table = self.normalize_name(ref_synonym)
                            remote_owner = self.normalize_name(ref_remote_owner)

                    rec['referred_table'] = remote_table

                    if requested_schema is not None or self.denormalize_name(remote_owner) != schema:
                        rec['referred_schema'] = remote_owner

                local_cols.append(local_column)
                remote_cols.append(remote_column)

        return fkeys.values()
Example #35
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        """

        kw arguments can be:

            oracle_resolve_synonyms

            dblink

        """

        requested_schema = schema  # to check later on
        resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
        dblink = kw.get('dblink', '')
        info_cache = kw.get('info_cache')

        (table_name, schema, dblink, synonym) = \
            self._prepare_reflection_args(connection, table_name, schema,
                                          resolve_synonyms, dblink,
                                          info_cache=info_cache)

        constraint_data = self._get_constraint_data(
            connection,
            table_name,
            schema,
            dblink,
            info_cache=kw.get('info_cache'))

        def fkey_rec():
            return {
                'name': None,
                'constrained_columns': [],
                'referred_schema': None,
                'referred_table': None,
                'referred_columns': []
            }

        fkeys = util.defaultdict(fkey_rec)

        for row in constraint_data:
            (cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
                    row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])

            if cons_type == 'R':
                if remote_table is None:
                    # ticket 363
                    util.warn(
                        ("Got 'None' querying 'table_name' from "
                         "all_cons_columns%(dblink)s - does the user have "
                         "proper rights to the table?") % {'dblink': dblink})
                    continue

                rec = fkeys[cons_name]
                rec['name'] = cons_name
                local_cols, remote_cols = rec['constrained_columns'], rec[
                    'referred_columns']

                if not rec['referred_table']:
                    if resolve_synonyms:
                        ref_remote_name, ref_remote_owner, ref_dblink, ref_synonym = \
                                self._resolve_synonym(
                                    connection,
                                    desired_owner=self.denormalize_name(remote_owner),
                                    desired_table=self.denormalize_name(remote_table)
                                )
                        if ref_synonym:
                            remote_table = self.normalize_name(ref_synonym)
                            remote_owner = self.normalize_name(
                                ref_remote_owner)

                    rec['referred_table'] = remote_table

                    if requested_schema is not None or self.denormalize_name(
                            remote_owner) != schema:
                        rec['referred_schema'] = remote_owner

                local_cols.append(local_column)
                remote_cols.append(remote_column)

        return list(fkeys.values())
Example #36
def __init__(self, fn):
    self.__name__ = fn.__name__
    self.__doc__ = fn.__doc__
    self._clslevel = util.defaultdict(list)
Example #37
def remove(target, identifier, fn):
    """Remove an event listener.

    Note that some event removals, particularly for those event dispatchers
    which create wrapper functions and secondary event listeners, may not yet
    be supported.

    """
    for evt_cls in _registrars[identifier]:
        for tgt in evt_cls._accept_with(target):
            tgt.dispatch._remove(identifier, tgt, fn)
            return


_registrars = util.defaultdict(list)


def _is_event_name(name):
    return not name.startswith("_") and name != "dispatch"


class _UnpickleDispatch(object):
    """Serializable callable that re-generates an instance of :class:`_Dispatch`
    given a particular :class:`.Events` subclass.

    """

    def __call__(self, _parent_cls):
        for cls in _parent_cls.__mro__:
            if "dispatch" in cls.__dict__:
Example #39
def remove(target, identifier, fn):
    """Remove an event listener.

    Note that some event removals, particularly for those event dispatchers
    which create wrapper functions and secondary event listeners, may not yet
    be supported.

    """
    for evt_cls in _registrars[identifier]:
        for tgt in evt_cls._accept_with(target):
            tgt.dispatch._remove(identifier, tgt, fn)
            return


_registrars = util.defaultdict(list)


def _is_event_name(name):
    return not name.startswith('_') and name != 'dispatch'


class _UnpickleDispatch(object):
    """Serializable callable that re-generates an instance of :class:`_Dispatch`
    given a particular :class:`.Events` subclass.

    """
    def __call__(self, _parent_cls):
        for cls in _parent_cls.__mro__:
            if 'dispatch' in cls.__dict__:
                return cls.__dict__['dispatch'].dispatch_cls(_parent_cls)