Example 1
0
def makeJobSchema(inSchema):
    """
    Create a self-contained schema for L{JobInfo} to use, in C{inSchema}.

    @param inSchema: a L{Schema} to add the job table to.
    @type inSchema: L{Schema}

    @return: a schema with just the one table.
    """
    # Building this duplicate schema sidesteps a circular dependency; ideally
    # independent schema objects that the transaction knows about would be
    # used instead.
    jobTable = Table(inSchema, "JOB")

    # (column name, SQL type, extra keyword arguments), in declaration order.
    columnSpecs = [
        ("JOB_ID", SQLType("integer", None),
         dict(default=Sequence(inSchema, "JOB_SEQ"), notNull=True, primaryKey=True)),
        ("WORK_TYPE", SQLType("varchar", 255), dict(notNull=True)),
        ("PRIORITY", SQLType("integer", 0), dict(default=0, notNull=True)),
        ("WEIGHT", SQLType("integer", 0), dict(default=0, notNull=True)),
        ("NOT_BEFORE", SQLType("timestamp", None), dict(notNull=True)),
        ("IS_ASSIGNED", SQLType("integer", 0), dict(default=0, notNull=True)),
        ("ASSIGNED", SQLType("timestamp", None), dict(default=None)),
        ("OVERDUE", SQLType("timestamp", None), dict(default=None)),
        ("FAILED", SQLType("integer", 0), dict(default=0, notNull=True)),
        ("PAUSE", SQLType("integer", 0), dict(default=0, notNull=True)),
    ]
    for columnName, sqlType, extras in columnSpecs:
        jobTable.addColumn(columnName, sqlType, **extras)

    return inSchema
Example 2
0
def dumpCurrentSchema(verbose=False):
    """
    Dump the current PostgreSQL C{public} schema (tables, columns, indexes
    and sequences) into a L{Schema} object.

    @param verbose: whether C{execSQL} should log what it is doing.
    @type verbose: C{bool}

    @return: the dumped schema.
    @rtype: L{Schema}
    """
    schema = Schema("Dumped schema")
    tables = {}

    # Tables
    rows = execSQL(
        "Schema tables...",
        "select table_name from information_schema.tables where table_schema = 'public';",
        verbose)
    for row in rows:
        # execSQL returns row sequences; the table name is the sole column
        # (the sibling dumpers index row[0] the same way).
        tableName = row[0]
        table = Table(schema, tableName)
        tables[tableName] = table

        # Columns -- distinct variable names so the outer loop's "rows" and
        # table-name bindings are not clobbered while a table is processed.
        columnRows = execSQL(
            "Reading table '{}' columns...".format(tableName),
            "select column_name from information_schema.columns where table_schema = 'public' and table_name = '{}';"
            .format(tableName), verbose)
        for columnRow in columnRows:
            # TODO: figure out the type
            column = Column(table, columnRow[0], None)
            table.columns.append(column)

    # Indexes
    # TODO: handle implicit indexes created via primary key() and unique() statements within CREATE TABLE
    rows = execSQL(
        "Schema indexes...",
        "select indexdef from pg_indexes where schemaname = 'public';",
        verbose)
    for row in rows:
        # Strip the schema qualifier so the SQL parser sees bare table names.
        addSQLToSchema(schema, row[0].replace("public.", ""))

    # Sequences
    rows = execSQL(
        "Schema sequences...",
        "select sequence_name from information_schema.sequences where sequence_schema = 'public';",
        verbose)
    for row in rows:
        Sequence(schema, row[0])

    return schema
Example 3
0
def dumpCurrentSchema(verbose=False):
    """
    Dump the current PostgreSQL C{public} schema (sequences, tables, columns,
    constraints, references, indexes and functions) into a L{Schema} object.

    Object names are normalized to upper case, matching the parsed-SQL
    schema representation.

    @param verbose: whether C{execSQL} should log what it is doing.
    @type verbose: C{bool}

    @return: the dumped schema.
    @rtype: L{Schema}
    """
    schemaname = "public"

    schema = Schema("Dumped schema")

    # Sequences
    seqs = {}
    rows = execSQL(
        "Schema sequences...",
        "select sequence_name from information_schema.sequences where sequence_schema = '%s';" % (schemaname,),
        verbose
    )
    for row in rows:
        name = row[0]
        seqs[name.upper()] = Sequence(schema, name.upper())

    # Tables
    tables = {}
    rows = execSQL(
        "Schema tables...",
        "select table_name from information_schema.tables where table_schema = '%s';" % (schemaname,),
        verbose
    )
    for row in rows:
        name = row[0]
        table = Table(schema, name.upper())
        tables[name.upper()] = table

        # Columns -- distinct variable names so the outer loop's "rows" and
        # "name" bindings are not clobbered while a table is processed.
        colrows = execSQL(
            "Reading table '{}' columns...".format(name),
            "select column_name, data_type, is_nullable, character_maximum_length, column_default from information_schema.columns where table_schema = '%s' and table_name = '%s';" % (schemaname, name,),
            verbose,
        )
        for colname, datatype, is_nullable, charlen, default in colrows:
            # TODO: figure out the type
            column = Column(table, colname.upper(), SQLType(DTYPE_MAP_POSTGRES.get(datatype, datatype), int(charlen) if charlen else 0))
            table.columns.append(column)
            if default:
                if default.startswith("nextval("):
                    # e.g. nextval('public.job_seq'::regclass) -> JOB_SEQ
                    dname = default.split("'")[1].split(".")[-1]
                    column.default = seqs[dname.upper()]
                elif default in DEFAULTVALUE_MAP_POSTGRES:
                    column.default = DEFAULTVALUE_MAP_POSTGRES[default]
                else:
                    # Prefer an integer default; otherwise keep the raw text.
                    try:
                        column.default = int(default)
                    except ValueError:
                        column.default = default
            if is_nullable == "NO":
                table.tableConstraint(Constraint.NOT_NULL, [column.name, ])

    # Key columns
    keys = {}
    rows = execSQL(
        "Schema key columns...",
        "select constraint_name, table_name, column_name from information_schema.key_column_usage where constraint_schema = '%s';" % (schemaname,),
        verbose
    )
    for conname, tname, cname in rows:
        keys[conname] = (tname, cname)

    # Constraints
    constraints = {}
    rows = execSQL(
        "Schema constraints...",  # fixed log-label typo ("SChema")
        "select constraint_name, table_name, column_name from information_schema.constraint_column_usage where constraint_schema = '%s';" % (schemaname,),
        verbose
    )
    for conname, tname, cname in rows:
        constraints[conname] = (tname, cname)

    # References - referential_constraints
    rows = execSQL(
        "Schema referential constraints...",
        "select constraint_name, unique_constraint_name, delete_rule from information_schema.referential_constraints where constraint_schema = '%s';" % (schemaname,),
        verbose
    )
    for conname, uconname, delete in rows:
        # Map the FK constraint back to its table/column, and the referenced
        # unique constraint to the table it covers.
        table = tables[keys[conname][0].upper()]
        column = table.columnNamed(keys[conname][1].upper())
        column.doesReferenceName(constraints[uconname][0].upper())
        if delete != "NO ACTION":
            column.deleteAction = delete.lower()

    # Indexes
    # TODO: handle implicit indexes created via primary key() and unique() statements within CREATE TABLE
    rows = execSQL(
        "Schema indexes...",
        "select indexdef from pg_indexes where schemaname = '%s';" % (schemaname,),
        verbose
    )
    for indexdef in rows:
        # Strip the schema qualifier so the SQL parser sees bare table names.
        addSQLToSchema(schema, indexdef[0].replace("%s." % (schemaname,), "").upper())

    # Functions
    rows = execSQL(
        "Schema functions",
        "select routine_name from information_schema.routines where routine_schema = '%s';" % (schemaname,),
        verbose
    )
    for row in rows:
        name = row[0]
        Function(schema, name)

    return schema
Example 4
0
def addSQLToSchema(schema, schemaData):
    """
    Add new SQL to an existing schema.

    Walks each statement parsed by sqlparse and handles CREATE TABLE,
    CREATE SEQUENCE, CREATE [UNIQUE] INDEX, CREATE [OR REPLACE] FUNCTION
    and INSERT statements; any other statement type is reported on stdout.

    NOTE(review): uses Python 2 idioms (iterator C{.next()}, C{u""}
    literals, C{str.encode("utf-8")} on identifier names).

    @param schema: The schema to add the new SQL to.
    @type schema: L{Schema}

    @param schemaData: A string containing some SQL statements.
    @type schemaData: C{str}

    @return: the C{schema} argument
    """
    parsed = parse(schemaData)

    for stmt in parsed:
        preface = ""

        # Collect leading non-significant tokens (whitespace/comments) so
        # they can be attached as a comment to a created table below.
        while stmt.tokens and not significant(stmt.tokens[0]):
            preface += str(stmt.tokens.pop(0))

        if not stmt.tokens:
            continue

        if stmt.get_type() == "CREATE":
            # The keyword after CREATE decides which object kind follows.
            createType = stmt.token_next(1, True).value.upper()

            if createType == u"TABLE":
                t = tableFromCreateStatement(schema, stmt)
                t.addComment(preface)

            elif createType == u"SEQUENCE":
                # Registering the Sequence on the schema is the side effect;
                # the instance itself is not kept here.
                Sequence(schema,
                         stmt.token_next(2, True).get_name().encode("utf-8"))

            elif createType in (u"INDEX", u"UNIQUE"):
                # CREATE [UNIQUE] INDEX <name> ON <table> [USING ...] (cols)
                signifindex = iterSignificant(stmt)
                expect(signifindex, ttype=Keyword.DDL, value="CREATE")
                token = signifindex.next()
                unique = False

                if token.match(Keyword, "UNIQUE"):
                    unique = True
                    token = signifindex.next()

                if not token.match(Keyword, "INDEX"):
                    raise ViolatedExpectation("INDEX or UNQIUE", token.value)

                indexName = nameOrIdentifier(signifindex.next())
                expect(signifindex, ttype=Keyword, value="ON")
                token = signifindex.next()

                # sqlparse groups "table(columns)" as a Function token;
                # otherwise the table name and a USING clause follow.
                if isinstance(token, Function):
                    [tableName, columnArgs] = iterSignificant(token)
                else:
                    tableName = token
                    token = signifindex.next()

                    if token.match(Keyword, "USING"):
                        [_ignore, columnArgs
                         ] = iterSignificant(expect(signifindex, cls=Function))
                    else:
                        raise ViolatedExpectation("USING", token)

                tableName = nameOrIdentifier(tableName)
                arggetter = iterSignificant(columnArgs)

                # The column list is wrapped in parentheses; one identifier
                # or an IdentifierList of several.
                expect(arggetter, ttype=Punctuation, value=u"(")
                valueOrValues = arggetter.next()

                if isinstance(valueOrValues, IdentifierList):
                    valuelist = valueOrValues.get_identifiers()
                else:
                    valuelist = [valueOrValues]

                expect(arggetter, ttype=Punctuation, value=u")")

                idx = Index(schema, indexName, schema.tableNamed(tableName),
                            unique)

                for token in valuelist:
                    columnName = nameOrIdentifier(token)
                    idx.addColumn(idx.table.columnNamed(columnName))

            elif createType == u"FUNCTION":
                parseFunction(schema, stmt)

        elif stmt.get_type() == "INSERT":
            # INSERT INTO <table> [(columns)] VALUES (v1, v2, ...)
            insertTokens = iterSignificant(stmt)
            expect(insertTokens, ttype=Keyword.DML, value="INSERT")
            expect(insertTokens, ttype=Keyword, value="INTO")

            token = insertTokens.next()

            # As above, "table(columns)" parses as a Function token.
            if isinstance(token, Function):
                [tableName, columnArgs] = iterSignificant(token)
                tableName = tableName.get_name()
                columns = namesInParens(columnArgs)
            else:
                tableName = token.get_name()
                columns = None

            expect(insertTokens, ttype=Keyword, value="VALUES")

            values = expect(insertTokens, cls=Parenthesis)
            vals = iterSignificant(values)
            expect(vals, ttype=Punctuation, value="(")

            valuelist = expect(vals, cls=IdentifierList)
            expect(vals, ttype=Punctuation, value=")")

            rowData = []

            # Convert each literal by token type: integers via int(),
            # single-quoted strings via _destringify. Other literal types
            # would raise KeyError here.
            for ident in valuelist.get_identifiers():
                rowData.append({
                    Number.Integer: int,
                    String.Single: _destringify
                }[ident.ttype](ident.value))

            schema.tableNamed(tableName).insertSchemaRow(rowData,
                                                         columns=columns)

        elif stmt.get_type() == "CREATE OR REPLACE":
            createType = stmt.token_next(1, True).value.upper()

            if createType == u"FUNCTION":
                parseFunction(schema, stmt)

        else:
            print("unknown type:", stmt.get_type())

    return schema
Example 5
0
def dumpSchema_oracle(txn, title, schemaname="public"):
    """
    Generate the L{Schema}.

    Reads the Oracle data dictionary views (all_sequences, all_tables,
    all_tab_columns, all_cons_columns, all_constraints, all_ind_columns,
    all_indexes, all_procedures) for the given owner and rebuilds the
    schema with upper-cased object names.

    NOTE(review): this body uses C{yield txn.execSQL(...)} and
    C{returnValue(...)}, so it is presumably decorated with Twisted's
    C{inlineCallbacks} at its definition site -- confirm in the full file.

    @param txn: a transaction-like object providing C{execSQL}.
    @param title: the name for the resulting L{Schema}.
    @type title: C{str}
    @param schemaname: the schema owner to dump; queried upper-cased.
    @type schemaname: C{str}
    """

    schemaname = schemaname.lower()

    schema = Schema(title)

    # Sequences
    seqs = {}
    rows = yield txn.execSQL(
        "select sequence_name from all_sequences where sequence_owner = '%s'" %
        (schemaname.upper(), ))
    for row in rows:
        name = row[0]
        seqs[name.upper()] = Sequence(schema, name.upper())

    # Tables
    tables = {}
    rows = yield txn.execSQL(
        "select table_name from all_tables where owner = '%s'" %
        (schemaname.upper(), ))
    for row in rows:
        name = row[0]
        table = Table(schema, name.upper())
        tables[name.upper()] = table

        # Columns
        # NOTE(review): this inner query rebinds "rows" and the loop below
        # rebinds "name"; safe because the outer iterator is already bound
        # and "name" is reassigned each outer iteration, but fragile.
        rows = yield txn.execSQL(
            "select column_name, data_type, nullable, char_length, data_default from all_tab_columns where owner = '%s' and table_name = '%s'"
            % (
                schemaname.upper(),
                name,
            ))
        for name, datatype, is_nullable, charlen, default in rows:
            # TODO: figure out the type
            column = Column(
                table, name.upper(),
                SQLType(
                    DTYPE_MAP_ORACLE.get(datatype.lower(), datatype.lower()),
                    charlen))
            table.columns.append(column)
            if default:
                default = default.strip()
                if default.startswith("nextval("):
                    # e.g. nextval('owner.job_seq') -> JOB_SEQ
                    dname = default.split("'")[1].split(".")[-1]
                    column.default = seqs[dname.upper()]
                elif default in DEFAULTVALUE_MAP_ORACLE:
                    column.default = DEFAULTVALUE_MAP_ORACLE[default]
                else:
                    # Prefer an integer default; otherwise keep the raw text.
                    try:
                        column.default = int(default)
                    except ValueError:
                        column.default = default
            # Oracle reports nullability as Y/N.
            if is_nullable == "N":
                table.tableConstraint(Constraint.NOT_NULL, [
                    column.name,
                ])

    # Constraints -- first gather the (table, column, position) members of
    # each constraint, then classify the constraints themselves.
    constraints = collections.defaultdict(list)
    rows = yield txn.execSQL(
        "select constraint_name, table_name, column_name, position from all_cons_columns where owner = '%s'"
        % (schemaname.upper(), ))
    for conname, tname, cname, position in rows:
        constraints[conname].append((
            tname,
            cname,
            position,
        ))
    rows = yield txn.execSQL(
        "select constraint_name, constraint_type, table_name, r_constraint_name, delete_rule from all_constraints where owner = '%s'"
        % (schemaname.upper(), ))
    for conname, conntype, tname, r_constraint_name, delete_rule in rows:
        if constraints[conname][0][0].upper() in tables:
            constraint = constraints[conname]
            # Order the member columns by their position in the constraint.
            constraint = sorted(constraint, key=lambda x: x[2])
            table = tables[constraint[0][0].upper()]
            column_names = [item[1].upper() for item in constraint]
            columns = [
                table.columnNamed(column_name) for column_name in column_names
            ]
            # Oracle constraint_type codes: P=primary key, U=unique,
            # R=referential (foreign key).
            if conntype == "P":
                table.primaryKey = columns
            elif conntype == "U":
                table.tableConstraint(Constraint.UNIQUE, column_names)
            elif conntype == "R":
                # FK: point the first column at the referenced constraint's
                # table; record a non-default delete rule.
                columns[0].doesReferenceName(
                    constraints[r_constraint_name][0][0].upper())
                if delete_rule.lower() != "no action":
                    columns[0].deleteAction = delete_rule.lower()

    # Indexed columns, grouped per index and ordered later by position.
    idx = collections.defaultdict(list)
    rows = yield txn.execSQL(
        "select index_name, column_name, column_position from all_ind_columns where index_owner = '%s'"
        % (schemaname.upper(), ))
    for index_name, column_name, column_position in rows:
        idx[index_name].append((column_name, column_position))

    # Indexes
    rows = yield txn.execSQL(
        "select index_name, table_name, uniqueness from all_indexes where owner = '%s'"
        % (schemaname.upper(), ))
    for index_name, table_name, uniqueness in rows:
        if table_name in tables:
            table = tables[table_name]
            column_names = [
                item[0].upper()
                for item in sorted(idx[index_name], key=lambda x: x[1])
            ]
            columns = [
                table.columnNamed(column_name) for column_name in column_names
            ]
            index = Index(schema, index_name.upper(), table,
                          uniqueness == "UNIQUE")
            for column in columns:
                index.addColumn(column)

    # Functions -- registering on the schema is the side effect.
    rows = yield txn.execSQL(
        "select object_name from all_procedures where owner = '%s'" %
        (schemaname.upper(), ))
    for row in rows:
        name = row[0]
        Function(schema, name)

    returnValue(schema)
Example 6
0
def dumpSchema(txn, title, schemaname="public"):
    """
    Generate the L{Schema}.

    Reads the PostgreSQL information_schema (sequences, tables, columns,
    key_column_usage, constraint_column_usage, referential_constraints,
    routines) plus pg_indexes for C{schemaname} and rebuilds the schema
    with upper-cased object names.

    NOTE(review): this body uses C{yield txn.execSQL(...)} and
    C{returnValue(...)}, so it is presumably decorated with Twisted's
    C{inlineCallbacks} at its definition site -- confirm in the full file.

    @param txn: a transaction-like object providing C{execSQL}.
    @param title: the name for the resulting L{Schema}.
    @type title: C{str}
    @param schemaname: the database schema to dump; queried lower-cased.
    @type schemaname: C{str}
    """

    schemaname = schemaname.lower()

    schema = Schema(title)

    # Sequences
    seqs = {}
    rows = yield txn.execSQL(
        "select sequence_name from information_schema.sequences where sequence_schema = '%s';"
        % (schemaname, ))
    for row in rows:
        name = row[0]
        seqs[name.upper()] = Sequence(schema, name.upper())

    # Tables
    tables = {}
    rows = yield txn.execSQL(
        "select table_name from information_schema.tables where table_schema = '%s';"
        % (schemaname, ))
    for row in rows:
        name = row[0]
        table = Table(schema, name.upper())
        tables[name.upper()] = table

        # Columns
        # NOTE(review): this inner query rebinds "rows" and the loop below
        # rebinds "name"; safe because the outer iterator is already bound
        # and "name" is reassigned each outer iteration, but fragile.
        rows = yield txn.execSQL(
            "select column_name, data_type, character_maximum_length, column_default from information_schema.columns where table_schema = '%s' and table_name = '%s';"
            % (
                schemaname,
                name,
            ))
        for name, datatype, charlen, default in rows:
            # TODO: figure out the type
            column = Column(
                table, name.upper(),
                SQLType(DTYPE_MAP.get(datatype, datatype), charlen))
            table.columns.append(column)
            if default:
                if default.startswith("nextval("):
                    # e.g. nextval('public.job_seq'::regclass) -> JOB_SEQ
                    dname = default.split("'")[1].split(".")[-1]
                    column.default = seqs[dname.upper()]
                elif default in DEFAULTVALUE_MAP:
                    column.default = DEFAULTVALUE_MAP[default]
                else:
                    # Prefer an integer default; otherwise keep the raw text.
                    try:
                        column.default = int(default)
                    except ValueError:
                        column.default = default

    # Key columns: constraint name -> (table, column) it applies to.
    keys = {}
    rows = yield txn.execSQL(
        "select constraint_name, table_name, column_name from information_schema.key_column_usage where constraint_schema = '%s';"
        % (schemaname, ))
    for conname, tname, cname in rows:
        keys[conname] = (tname, cname)

    # Constraints: constraint name -> (table, column) it references.
    constraints = {}
    rows = yield txn.execSQL(
        "select constraint_name, table_name, column_name from information_schema.constraint_column_usage where constraint_schema = '%s';"
        % (schemaname, ))
    for conname, tname, cname in rows:
        constraints[conname] = (tname, cname)

    # References - referential_constraints
    rows = yield txn.execSQL(
        "select constraint_name, unique_constraint_name, delete_rule from information_schema.referential_constraints where constraint_schema = '%s';"
        % (schemaname, ))
    for conname, uconname, delete in rows:
        # Map the FK constraint back to its table/column, and the referenced
        # unique constraint to the table it covers; record a non-default
        # delete rule.
        table = tables[keys[conname][0].upper()]
        column = table.columnNamed(keys[conname][1].upper())
        column.doesReferenceName(constraints[uconname][0].upper())
        if delete != "NO ACTION":
            column.deleteAction = delete.lower()

    # Indexes
    # TODO: handle implicit indexes created via primary key() and unique() statements within CREATE TABLE
    rows = yield txn.execSQL(
        "select indexdef from pg_indexes where schemaname = '%s';" %
        (schemaname, ))
    for indexdef in rows:
        # Strip the schema qualifier so the SQL parser sees bare table names.
        addSQLToSchema(schema, indexdef[0].replace("%s." % (schemaname, ),
                                                   "").upper())

    # Functions -- registering on the schema is the side effect.
    rows = yield txn.execSQL(
        "select routine_name from information_schema.routines where routine_schema = '%s';"
        % (schemaname, ))
    for row in rows:
        name = row[0]
        Function(schema, name)

    returnValue(schema)