Example #1
def makeLockSchema(inSchema):
    """
    Create a self-contained schema just for L{Locker} use, in C{inSchema}.

    @param inSchema: a L{Schema} to add the locks table to.
    @type inSchema: L{Schema}

    @return: inSchema
    """
    LockTable = Table(inSchema, "NAMED_LOCK")

    LockTable.addColumn("LOCK_NAME", SQLType("varchar", 255))
    LockTable.tableConstraint(Constraint.NOT_NULL, ["LOCK_NAME"])
    LockTable.tableConstraint(Constraint.UNIQUE, ["LOCK_NAME"])
    LockTable.primaryKey = [LockTable.columnNamed("LOCK_NAME")]

    return inSchema
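
A minimal usage sketch, assuming the same Schema/Table imports the example relies on; tableNamed() is an assumption here (only columnNamed() appears in these listings):

schema = Schema("lock schema")
makeLockSchema(schema)
lockTable = schema.tableNamed("NAMED_LOCK")    # tableNamed() is assumed, not shown above
lockName = lockTable.columnNamed("LOCK_NAME")  # columnNamed() as used in the example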
Example #2
def makeLockSchema(inSchema):
    """
    Create a self-contained schema just for L{Locker} use, in C{inSchema}.

    @param inSchema: a L{Schema} to add the locks table to.
    @type inSchema: L{Schema}

    @return: inSchema
    """
    LockTable = Table(inSchema, 'NAMED_LOCK')

    LockTable.addColumn("LOCK_NAME", SQLType("varchar", 255))
    LockTable.tableConstraint(Constraint.NOT_NULL, ["LOCK_NAME"])
    LockTable.tableConstraint(Constraint.UNIQUE, ["LOCK_NAME"])
    LockTable.primaryKey = [LockTable.columnNamed("LOCK_NAME")]

    return inSchema
Example #3
def dumpCurrentSchema(verbose=False):

    schema = Schema("Dumped schema")
    tables = {}

    # Tables
    rows = execSQL(
        "Schema tables...",
        "select table_name from information_schema.tables where table_schema = 'public';",
        verbose)
    for row in rows:
        name = row[0]
        table = Table(schema, name)
        tables[name] = table

        # Columns (distinct names so the outer row/name bindings are not clobbered)
        colRows = execSQL(
            "Reading table '{}' columns...".format(name),
            "select column_name from information_schema.columns where table_schema = 'public' and table_name = '{}';"
            .format(name), verbose)
        for colRow in colRows:
            colName = colRow[0]
            # TODO: figure out the type
            column = Column(table, colName, None)
            table.columns.append(column)

    # Indexes
    # TODO: handle implicit indexes created via primary key() and unique() statements within CREATE TABLE
    rows = execSQL(
        "Schema indexes...",
        "select indexdef from pg_indexes where schemaname = 'public';",
        verbose)
    for indexdef in rows:
        addSQLToSchema(schema, indexdef[0].replace("public.", ""))

    # Sequences
    rows = execSQL(
        "Schema sequences...",
        "select sequence_name from information_schema.sequences where sequence_schema = 'public';",
        verbose)
    for row in rows:
        name = row[0]
        Sequence(schema, name)

    return schema
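
Every dump function in this section leans on an execSQL(description, sql, verbose) helper that is never shown. Purely as an assumption about its shape, here is a minimal sketch built on psycopg2 (the DSN is hypothetical); it returns a list of row tuples, which is why the loops above index row[0]:

import psycopg2

_connection = psycopg2.connect("dbname=caldav")  # hypothetical DSN


def execSQL(description, sql, verbose=False):
    # Print the progress message when verbose, run the query, return every row.
    if verbose:
        print(description)
    cursor = _connection.cursor()
    try:
        cursor.execute(sql)
        return cursor.fetchall()
    finally:
        cursor.close()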
Example #4
def tableFromCreateStatement(schema, stmt):
    """
    Add a table from a CREATE TABLE sqlparse statement object.

    @param schema: The schema to add the table statement to.
    @type schema: L{Schema}

    @param stmt: The C{CREATE TABLE} statement object.
    @type stmt: L{Statement}
    """
    i = iterSignificant(stmt)
    expect(i, ttype=Keyword.DDL, value="CREATE")
    expect(i, ttype=Keyword, value="TABLE")
    function = expect(i, cls=Function)
    i = iterSignificant(function)
    name = expect(i, cls=Identifier).get_name().encode("utf-8")
    self = Table(schema, name)
    parens = expect(i, cls=Parenthesis)
    cp = _ColumnParser(self, iterSignificant(parens), parens)
    cp.parse()
    return self
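
A brief usage sketch: the Statement object comes from sqlparse, so a CREATE TABLE string can be parsed and handed straight to tableFromCreateStatement. The DDL below is illustrative (it mirrors the lock table from Example #1); iterSignificant, expect, and _ColumnParser are helpers from the same module and are not shown here:

import sqlparse

ddl = "CREATE TABLE NAMED_LOCK (LOCK_NAME varchar(255) NOT NULL UNIQUE);"
stmt = sqlparse.parse(ddl)[0]            # sqlparse.parse() returns a sequence of Statement objects
schema = Schema("parsed schema")
table = tableFromCreateStatement(schema, stmt)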
Example #5
def dumpCurrentSchema(verbose=False):

    schemaname = "public"

    schema = Schema("Dumped schema")

    # Sequences
    seqs = {}
    rows = execSQL(
        "Schema sequences...",
        "select sequence_name from information_schema.sequences where sequence_schema = '%s';" % (schemaname,),
        verbose
    )
    for row in rows:
        name = row[0]
        seqs[name.upper()] = Sequence(schema, name.upper())

    # Tables
    tables = {}
    rows = execSQL(
        "Schema tables...",
        "select table_name from information_schema.tables where table_schema = '%s';" % (schemaname,),
        verbose
    )
    for row in rows:
        name = row[0]
        table = Table(schema, name.upper())
        tables[name.upper()] = table

        # Columns
        rows = execSQL(
            "Reading table '{}' columns...".format(name),
            "select column_name, data_type, is_nullable, character_maximum_length, column_default from information_schema.columns where table_schema = '%s' and table_name = '%s';" % (schemaname, name,),
            verbose,
        )
        for name, datatype, is_nullable, charlen, default in rows:
            # TODO: figure out the type
            column = Column(table, name.upper(), SQLType(DTYPE_MAP_POSTGRES.get(datatype, datatype), int(charlen) if charlen else 0))
            table.columns.append(column)
            if default:
                if default.startswith("nextval("):
                    dname = default.split("'")[1].split(".")[-1]
                    column.default = seqs[dname.upper()]
                elif default in DEFAULTVALUE_MAP_POSTGRES:
                    column.default = DEFAULTVALUE_MAP_POSTGRES[default]
                else:
                    try:
                        column.default = int(default)
                    except ValueError:
                        column.default = default
            if is_nullable == "NO":
                table.tableConstraint(Constraint.NOT_NULL, [column.name, ])

    # Key columns
    keys = {}
    rows = execSQL(
        "Schema key columns...",
        "select constraint_name, table_name, column_name from information_schema.key_column_usage where constraint_schema = '%s';" % (schemaname,),
        verbose
    )
    for conname, tname, cname in rows:
        keys[conname] = (tname, cname)

    # Constraints
    constraints = {}
    rows = execSQL(
        "SChema constraints...",
        "select constraint_name, table_name, column_name from information_schema.constraint_column_usage where constraint_schema = '%s';" % (schemaname,),
        verbose
    )
    for conname, tname, cname in rows:
        constraints[conname] = (tname, cname)

    # References - referential_constraints
    rows = execSQL(
        "Schema referential constraints...",
        "select constraint_name, unique_constraint_name, delete_rule from information_schema.referential_constraints where constraint_schema = '%s';" % (schemaname,),
        verbose
    )
    for conname, uconname, delete in rows:
        table = tables[keys[conname][0].upper()]
        column = table.columnNamed(keys[conname][1].upper())
        column.doesReferenceName(constraints[uconname][0].upper())
        if delete != "NO ACTION":
            column.deleteAction = delete.lower()

    # Indexes
    # TODO: handle implicit indexes created via primary key() and unique() statements within CREATE TABLE
    rows = execSQL(
        "Schema indexes...",
        "select indexdef from pg_indexes where schemaname = '%s';" % (schemaname,),
        verbose
    )
    for indexdef in rows:
        addSQLToSchema(schema, indexdef[0].replace("%s." % (schemaname,), "").upper())

    # Functions
    rows = execSQL(
        "Schema functions",
        "select routine_name from information_schema.routines where routine_schema = '%s';" % (schemaname,),
        verbose
    )
    for row in rows:
        name = row[0]
        Function(schema, name)

    return schema
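
The nextval() branch above recovers a sequence name from a Postgres column default by plain string surgery; a short worked example of that expression (the default value is illustrative):

default = "nextval('public.job_seq'::regclass)"  # illustrative Postgres column default
dname = default.split("'")[1].split(".")[-1]     # "public.job_seq" -> "job_seq"
assert dname == "job_seq"                        # then looked up as seqs["JOB_SEQ"] above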
Example #6
def makeJobSchema(inSchema):
    """
    Create a self-contained schema for L{JobInfo} to use, in C{inSchema}.

    @param inSchema: a L{Schema} to add the job table to.
    @type inSchema: L{Schema}

    @return: a schema with just the one table.
    """
    # Initializing this duplicate schema avoids a circular dependency, but this
    # should really be accomplished with independent schema objects that the
    # transaction is made aware of somehow.
    JobTable = Table(inSchema, "JOB")

    JobTable.addColumn("JOB_ID", SQLType("integer", None), default=Sequence(inSchema, "JOB_SEQ"), notNull=True, primaryKey=True)
    JobTable.addColumn("WORK_TYPE", SQLType("varchar", 255), notNull=True)
    JobTable.addColumn("PRIORITY", SQLType("integer", 0), default=0, notNull=True)
    JobTable.addColumn("WEIGHT", SQLType("integer", 0), default=0, notNull=True)
    JobTable.addColumn("NOT_BEFORE", SQLType("timestamp", None), notNull=True)
    JobTable.addColumn("IS_ASSIGNED", SQLType("integer", 0), default=0, notNull=True)
    JobTable.addColumn("ASSIGNED", SQLType("timestamp", None), default=None)
    JobTable.addColumn("OVERDUE", SQLType("timestamp", None), default=None)
    JobTable.addColumn("FAILED", SQLType("integer", 0), default=0, notNull=True)
    JobTable.addColumn("PAUSE", SQLType("integer", 0), default=0, notNull=True)

    return inSchema
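
Note that this example passes notNull=True and primaryKey=True straight to addColumn() instead of issuing separate tableConstraint() calls as Example #1 does. A hedged sketch of the lock table from Example #1 rewritten in that keyword style, assuming addColumn() accepts the same keywords there:

def makeLockSchemaCompact(inSchema):
    # Hypothetical variant of Example #1 using the keyword form shown above.
    lockTable = Table(inSchema, "NAMED_LOCK")
    lockTable.addColumn("LOCK_NAME", SQLType("varchar", 255), notNull=True, primaryKey=True)
    lockTable.tableConstraint(Constraint.UNIQUE, ["LOCK_NAME"])
    return inSchema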
Example #7
def dumpSchema_oracle(txn, title, schemaname="public"):
    """
    Generate the L{Schema}.
    """

    schemaname = schemaname.lower()

    schema = Schema(title)

    # Sequences
    seqs = {}
    rows = yield txn.execSQL(
        "select sequence_name from all_sequences where sequence_owner = '%s'" %
        (schemaname.upper(), ))
    for row in rows:
        name = row[0]
        seqs[name.upper()] = Sequence(schema, name.upper())

    # Tables
    tables = {}
    rows = yield txn.execSQL(
        "select table_name from all_tables where owner = '%s'" %
        (schemaname.upper(), ))
    for row in rows:
        name = row[0]
        table = Table(schema, name.upper())
        tables[name.upper()] = table

        # Columns
        rows = yield txn.execSQL(
            "select column_name, data_type, nullable, char_length, data_default from all_tab_columns where owner = '%s' and table_name = '%s'"
            % (
                schemaname.upper(),
                name,
            ))
        for name, datatype, is_nullable, charlen, default in rows:
            # TODO: figure out the type
            column = Column(
                table, name.upper(),
                SQLType(
                    DTYPE_MAP_ORACLE.get(datatype.lower(), datatype.lower()),
                    charlen))
            table.columns.append(column)
            if default:
                default = default.strip()
                if default.startswith("nextval("):
                    dname = default.split("'")[1].split(".")[-1]
                    column.default = seqs[dname.upper()]
                elif default in DEFAULTVALUE_MAP_ORACLE:
                    column.default = DEFAULTVALUE_MAP_ORACLE[default]
                else:
                    try:
                        column.default = int(default)
                    except ValueError:
                        column.default = default
            if is_nullable == "N":
                table.tableConstraint(Constraint.NOT_NULL, [
                    column.name,
                ])

    # Constraints
    constraints = collections.defaultdict(list)
    rows = yield txn.execSQL(
        "select constraint_name, table_name, column_name, position from all_cons_columns where owner = '%s'"
        % (schemaname.upper(), ))
    for conname, tname, cname, position in rows:
        constraints[conname].append((
            tname,
            cname,
            position,
        ))
    rows = yield txn.execSQL(
        "select constraint_name, constraint_type, table_name, r_constraint_name, delete_rule from all_constraints where owner = '%s'"
        % (schemaname.upper(), ))
    for conname, conntype, tname, r_constraint_name, delete_rule in rows:
        if constraints[conname][0][0].upper() in tables:
            constraint = constraints[conname]
            constraint = sorted(constraint, key=lambda x: x[2])
            table = tables[constraint[0][0].upper()]
            column_names = [item[1].upper() for item in constraint]
            columns = [
                table.columnNamed(column_name) for column_name in column_names
            ]
            if conntype == "P":
                table.primaryKey = columns
            elif conntype == "U":
                table.tableConstraint(Constraint.UNIQUE, column_names)
            elif conntype == "R":
                columns[0].doesReferenceName(
                    constraints[r_constraint_name][0][0].upper())
                if delete_rule.lower() != "no action":
                    columns[0].deleteAction = delete_rule.lower()

    # Indexed columns
    idx = collections.defaultdict(list)
    rows = yield txn.execSQL(
        "select index_name, column_name, column_position from all_ind_columns where index_owner = '%s'"
        % (schemaname.upper(), ))
    for index_name, column_name, column_position in rows:
        idx[index_name].append((column_name, column_position))

    # Indexes
    rows = yield txn.execSQL(
        "select index_name, table_name, uniqueness from all_indexes where owner = '%s'"
        % (schemaname.upper(), ))
    for index_name, table_name, uniqueness in rows:
        if table_name in tables:
            table = tables[table_name]
            column_names = [
                item[0].upper()
                for item in sorted(idx[index_name], key=lambda x: x[1])
            ]
            columns = [
                table.columnNamed(column_name) for column_name in column_names
            ]
            index = Index(schema, index_name.upper(), table,
                          uniqueness == "UNIQUE")
            for column in columns:
                index.addColumn(column)

    # Functions
    rows = yield txn.execSQL(
        "select object_name from all_procedures where owner = '%s'" %
        (schemaname.upper(), ))
    for row in rows:
        name = row[0]
        Function(schema, name)

    returnValue(schema)
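
Because this function yields Deferreds from txn.execSQL() and finishes with returnValue(), it only works as a Twisted inlineCallbacks generator; the decorator has presumably been stripped from the listing. A minimal sketch of calling it under that assumption (the schema owner name is illustrative):

from twisted.internet.defer import inlineCallbacks, returnValue

@inlineCallbacks
def dumpAndReturn(txn):
    # dumpSchema_oracle must itself be decorated with @inlineCallbacks in its module.
    schema = yield dumpSchema_oracle(txn, "Dumped schema", schemaname="caldav")
    returnValue(schema)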
Example #8
def dumpSchema(txn, title, schemaname="public"):
    """
    Generate the L{Schema}.
    """

    schemaname = schemaname.lower()

    schema = Schema(title)

    # Sequences
    seqs = {}
    rows = yield txn.execSQL(
        "select sequence_name from information_schema.sequences where sequence_schema = '%s';"
        % (schemaname, ))
    for row in rows:
        name = row[0]
        seqs[name.upper()] = Sequence(schema, name.upper())

    # Tables
    tables = {}
    rows = yield txn.execSQL(
        "select table_name from information_schema.tables where table_schema = '%s';"
        % (schemaname, ))
    for row in rows:
        name = row[0]
        table = Table(schema, name.upper())
        tables[name.upper()] = table

        # Columns
        rows = yield txn.execSQL(
            "select column_name, data_type, character_maximum_length, column_default from information_schema.columns where table_schema = '%s' and table_name = '%s';"
            % (
                schemaname,
                name,
            ))
        for name, datatype, charlen, default in rows:
            # TODO: figure out the type
            column = Column(
                table, name.upper(),
                SQLType(DTYPE_MAP.get(datatype, datatype), charlen))
            table.columns.append(column)
            if default:
                if default.startswith("nextval("):
                    dname = default.split("'")[1].split(".")[-1]
                    column.default = seqs[dname.upper()]
                elif default in DEFAULTVALUE_MAP:
                    column.default = DEFAULTVALUE_MAP[default]
                else:
                    try:
                        column.default = int(default)
                    except ValueError:
                        column.default = default

    # Key columns
    keys = {}
    rows = yield txn.execSQL(
        "select constraint_name, table_name, column_name from information_schema.key_column_usage where constraint_schema = '%s';"
        % (schemaname, ))
    for conname, tname, cname in rows:
        keys[conname] = (tname, cname)

    # Constraints
    constraints = {}
    rows = yield txn.execSQL(
        "select constraint_name, table_name, column_name from information_schema.constraint_column_usage where constraint_schema = '%s';"
        % (schemaname, ))
    for conname, tname, cname in rows:
        constraints[conname] = (tname, cname)

    # References - referential_constraints
    rows = yield txn.execSQL(
        "select constraint_name, unique_constraint_name, delete_rule from information_schema.referential_constraints where constraint_schema = '%s';"
        % (schemaname, ))
    for conname, uconname, delete in rows:
        table = tables[keys[conname][0].upper()]
        column = table.columnNamed(keys[conname][1].upper())
        column.doesReferenceName(constraints[uconname][0].upper())
        if delete != "NO ACTION":
            column.deleteAction = delete.lower()

    # Indexes
    # TODO: handle implicit indexes created via primary key() and unique() statements within CREATE TABLE
    rows = yield txn.execSQL(
        "select indexdef from pg_indexes where schemaname = '%s';" %
        (schemaname, ))
    for indexdef in rows:
        addSQLToSchema(schema, indexdef[0].replace("%s." % (schemaname, ),
                                                   "").upper())

    # Functions
    rows = yield txn.execSQL(
        "select routine_name from information_schema.routines where routine_schema = '%s';"
        % (schemaname, ))
    for row in rows:
        name = row[0]
        Function(schema, name)

    returnValue(schema)
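
To make the reference-resolution step concrete: keys maps each foreign-key constraint to its referencing (table, column), constraints maps the referenced unique or primary-key constraint to its own (table, column), and referential_constraints ties the two names together. A small walk-through with hypothetical constraint names, shaped like the dictionaries built above:

keys = {"job_fk": ("child_table", "parent_id")}       # referencing side, from key_column_usage
constraints = {"parent_pk": ("parent_table", "id")}   # referenced side, from constraint_column_usage
referential = [("job_fk", "parent_pk", "CASCADE")]    # rows from referential_constraints

for conname, uconname, delete in referential:
    tname, cname = keys[conname]
    utname = constraints[uconname][0]
    print("%s.%s references %s (on delete %s)" % (tname, cname, utname, delete.lower()))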
Example #9
def dumpSchema_postgres(txn, title, schemaname="public"):
    """
    Generate the L{Schema}.
    """

    schemaname = schemaname.lower()

    schema = Schema(title)

    # Sequences
    seqs = {}
    rows = yield txn.execSQL(
        "select sequence_name from information_schema.sequences where sequence_schema = '%s';" % (schemaname,)
    )
    for row in rows:
        name = row[0]
        seqs[name.upper()] = Sequence(schema, name.upper())

    # Tables
    tables = {}
    rows = yield txn.execSQL(
        "select table_name from information_schema.tables where table_schema = '%s';" % (schemaname,)
    )
    for row in rows:
        name = row[0]
        table = Table(schema, name.upper())
        tables[name.upper()] = table

        # Columns
        rows = yield txn.execSQL(
            "select column_name, data_type, is_nullable, character_maximum_length, column_default from information_schema.columns where table_schema = '%s' and table_name = '%s';"
            % (schemaname, name)
        )
        for name, datatype, is_nullable, charlen, default in rows:
            # TODO: figure out the type
            column = Column(table, name.upper(), SQLType(DTYPE_MAP_POSTGRES.get(datatype, datatype), charlen))
            table.columns.append(column)
            if default:
                if default.startswith("nextval("):
                    dname = default.split("'")[1].split(".")[-1]
                    column.default = seqs[dname.upper()]
                elif default in DEFAULTVALUE_MAP_POSTGRES:
                    column.default = DEFAULTVALUE_MAP_POSTGRES[default]
                else:
                    try:
                        column.default = int(default)
                    except ValueError:
                        column.default = default
            if is_nullable == "NO":
                table.tableConstraint(Constraint.NOT_NULL, [column.name])

    # Key columns
    keys = {}
    rows = yield txn.execSQL(
        "select constraint_name, table_name, column_name from information_schema.key_column_usage where constraint_schema = '%s';"
        % (schemaname,)
    )
    for conname, tname, cname in rows:
        keys[conname] = (tname, cname)

    # Constraints
    constraints = {}
    rows = yield txn.execSQL(
        "select constraint_name, table_name, column_name from information_schema.constraint_column_usage where constraint_schema = '%s';"
        % (schemaname,)
    )
    for conname, tname, cname in rows:
        constraints[conname] = (tname, cname)

    # References - referential_constraints
    rows = yield txn.execSQL(
        "select constraint_name, unique_constraint_name, delete_rule from information_schema.referential_constraints where constraint_schema = '%s';"
        % (schemaname,)
    )
    for conname, uconname, delete in rows:
        table = tables[keys[conname][0].upper()]
        column = table.columnNamed(keys[conname][1].upper())
        column.doesReferenceName(constraints[uconname][0].upper())
        if delete != "NO ACTION":
            column.deleteAction = delete.lower()

    # Indexes
    # TODO: handle implicit indexes created via primary key() and unique() statements within CREATE TABLE
    rows = yield txn.execSQL("select indexdef from pg_indexes where schemaname = '%s';" % (schemaname,))
    for indexdef in rows:
        addSQLToSchema(schema, indexdef[0].replace("%s." % (schemaname,), "").upper())

    # Functions
    rows = yield txn.execSQL(
        "select routine_name from information_schema.routines where routine_schema = '%s';" % (schemaname,)
    )
    for row in rows:
        name = row[0]
        Function(schema, name)

    returnValue(schema)
Example #10
def dumpSchema_oracle(txn, title, schemaname="public"):
    """
    Generate the L{Schema}.
    """

    schemaname = schemaname.lower()

    schema = Schema(title)

    # Sequences
    seqs = {}
    rows = yield txn.execSQL(
        "select sequence_name from all_sequences where sequence_owner = '%s'" % (schemaname.upper(),)
    )
    for row in rows:
        name = row[0]
        seqs[name.upper()] = Sequence(schema, name.upper())

    # Tables
    tables = {}
    rows = yield txn.execSQL("select table_name from all_tables where owner = '%s'" % (schemaname.upper(),))
    for row in rows:
        name = row[0]
        table = Table(schema, name.upper())
        tables[name.upper()] = table

        # Columns
        rows = yield txn.execSQL(
            "select column_name, data_type, nullable, char_length, data_default from all_tab_columns where owner = '%s' and table_name = '%s'"
            % (schemaname.upper(), name)
        )
        for name, datatype, is_nullable, charlen, default in rows:
            # TODO: figure out the type
            column = Column(
                table, name.upper(), SQLType(DTYPE_MAP_ORACLE.get(datatype.lower(), datatype.lower()), charlen)
            )
            table.columns.append(column)
            if default:
                default = default.strip()
                if default.startswith("nextval("):
                    dname = default.split("'")[1].split(".")[-1]
                    column.default = seqs[dname.upper()]
                elif default in DEFAULTVALUE_MAP_ORACLE:
                    column.default = DEFAULTVALUE_MAP_ORACLE[default]
                else:
                    try:
                        column.default = int(default)
                    except ValueError:
                        column.default = default
            if is_nullable == "N":
                table.tableConstraint(Constraint.NOT_NULL, [column.name])

    # Constraints
    constraints = collections.defaultdict(list)
    rows = yield txn.execSQL(
        "select constraint_name, table_name, column_name, position from all_cons_columns where owner = '%s'"
        % (schemaname.upper(),)
    )
    for conname, tname, cname, position in rows:
        constraints[conname].append((tname, cname, position))
    rows = yield txn.execSQL(
        "select constraint_name, constraint_type, table_name, r_constraint_name, delete_rule from all_constraints where owner = '%s'"
        % (schemaname.upper(),)
    )
    for conname, conntype, tname, r_constraint_name, delete_rule in rows:
        if constraints[conname][0][0].upper() in tables:
            constraint = constraints[conname]
            constraint = sorted(constraint, key=lambda x: x[2])
            table = tables[constraint[0][0].upper()]
            column_names = [item[1].upper() for item in constraint]
            columns = [table.columnNamed(column_name) for column_name in column_names]
            if conntype == "P":
                table.primaryKey = columns
            elif conntype == "U":
                table.tableConstraint(Constraint.UNIQUE, column_names)
            elif conntype == "R":
                columns[0].doesReferenceName(constraints[r_constraint_name][0][0].upper())
                if delete_rule.lower() != "no action":
                    columns[0].deleteAction = delete_rule.lower()

    # Indexed columns
    idx = collections.defaultdict(list)
    rows = yield txn.execSQL(
        "select index_name, column_name, column_position from all_ind_columns where index_owner = '%s'"
        % (schemaname.upper(),)
    )
    for index_name, column_name, column_position in rows:
        idx[index_name].append((column_name, column_position))

    # Indexes
    rows = yield txn.execSQL(
        "select index_name, table_name, uniqueness from all_indexes where owner = '%s'" % (schemaname.upper(),)
    )
    for index_name, table_name, uniqueness in rows:
        if table_name in tables:
            table = tables[table_name]
            column_names = [item[0].upper() for item in sorted(idx[index_name], key=lambda x: x[1])]
            columns = [table.columnNamed(column_name) for column_name in column_names]
            index = Index(schema, index_name.upper(), table, uniqueness == "UNIQUE")
            for column in columns:
                index.addColumn(column)

    returnValue(schema)
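
The index loop above rebuilds each Oracle index from its columns sorted by position; the same Index API can also be driven by hand. A minimal sketch using only calls that appear in this example (the table and index names are illustrative):

schema = Schema("by hand")
table = Table(schema, "NAMED_LOCK")
table.addColumn("LOCK_NAME", SQLType("varchar", 255))
index = Index(schema, "NAMED_LOCK_IDX", table, True)   # True plays the role of uniqueness == "UNIQUE"
index.addColumn(table.columnNamed("LOCK_NAME"))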