Example #1
0
    def reflecttable(self, connection, table, include_columns):
        """Reflect a SQLite table's columns, foreign keys and unique
        indexes into ``table`` using PRAGMA queries.

        :param connection: connection used to execute the PRAGMAs.
        :param table: ``schema.Table`` populated in place.
        :param include_columns: optional collection of column names to
            restrict reflection to; falsy reflects all columns.
        :raises exc.NoSuchTableError: if ``table_info`` yields no rows.
        """
        preparer = self.identifier_preparer
        # PRAGMAs must be schema-qualified ("PRAGMA schemaname.table_info")
        # when the table lives in an attached database.
        if table.schema is None:
            pragma = "PRAGMA "
        else:
            pragma = "PRAGMA %s." % preparer.quote_identifier(table.schema)
        qtable = preparer.format_table(table, False)

        c = connection.execute("%stable_info(%s)" % (pragma, qtable))
        found_table = False
        while True:
            row = c.fetchone()
            if row is None:
                break

            found_table = True
            # table_info row layout: (cid, name, type, notnull, dflt_value, pk)
            (name, type_, nullable, default, has_default,
             primary_key) = (row[1], row[2].upper(), not row[3], row[4], row[4]
                             is not None, row[5])
            # strip double quotes SQLite may preserve from the original DDL
            name = re.sub(r'^\"|\"$', '', name)
            if include_columns and name not in include_columns:
                continue
            # split e.g. 'VARCHAR(30)' into base type name and '(30)' args
            match = re.match(r'(\w+)(\(.*?\))?', type_)
            if match:
                coltype = match.group(1)
                args = match.group(2)
            else:
                coltype = "VARCHAR"
                args = ''

            try:
                coltype = ischema_names[coltype]
            except KeyError:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (coltype, name))
                coltype = sqltypes.NullType

            # instantiate the type with any numeric length/precision args
            if args is not None:
                args = re.findall(r'(\d+)', args)
                coltype = coltype(*[int(a) for a in args])

            colargs = []
            if has_default:
                colargs.append(DefaultClause(sql.text(default)))
            table.append_column(
                schema.Column(name,
                              coltype,
                              primary_key=primary_key,
                              nullable=nullable,
                              *colargs))

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        # foreign_key_list row layout: (id, seq, table, from, to, ...);
        # rows sharing an id belong to one (possibly composite) constraint.
        c = connection.execute("%sforeign_key_list(%s)" % (pragma, qtable))
        fks = {}
        while True:
            row = c.fetchone()
            if row is None:
                break
            (constraint_name, tablename, localcol,
             remotecol) = (row[0], row[2], row[3], row[4])
            tablename = re.sub(r'^\"|\"$', '', tablename)
            localcol = re.sub(r'^\"|\"$', '', localcol)
            remotecol = re.sub(r'^\"|\"$', '', remotecol)
            try:
                fk = fks[constraint_name]
            except KeyError:
                fk = ([], [])
                fks[constraint_name] = fk

            # look up the table based on the given table's engine, not 'self',
            # since it could be a ProxyEngine
            # NOTE: 'remotetable' is unused afterwards; constructing it here
            # side-effects the referenced table into table.metadata so the
            # "tablename.remotecol" refspec below can resolve.
            remotetable = schema.Table(tablename,
                                       table.metadata,
                                       autoload=True,
                                       autoload_with=connection)
            constrained_column = table.c[localcol].name
            refspec = ".".join([tablename, remotecol])
            if constrained_column not in fk[0]:
                fk[0].append(constrained_column)
            if refspec not in fk[1]:
                fk[1].append(refspec)
        # 'name' (the pragma's constraint id) is intentionally not passed on;
        # SQLite FK constraints are unnamed here.
        for name, value in fks.iteritems():
            table.append_constraint(
                schema.ForeignKeyConstraint(value[0],
                                            value[1],
                                            link_to_name=True))
        # check for UNIQUE indexes
        c = connection.execute("%sindex_list(%s)" % (pragma, qtable))
        unique_indexes = []
        while True:
            row = c.fetchone()
            if row is None:
                break
            if (row[2] == 1):
                unique_indexes.append(row[1])
        # loop thru unique indexes for one that includes the primary key
        # NOTE(review): 'cols' is collected but never consumed in this view
        # of the code -- the function appears truncated here; confirm the
        # remainder against the original source.
        for idx in unique_indexes:
            c = connection.execute("%sindex_info(%s)" % (pragma, idx))
            cols = []
            while True:
                row = c.fetchone()
                if row is None:
                    break
                cols.append(row[2])
Example #2
0
    def test_autoincrement(self, metadata, connection):
        """Round-trip autoincrement behavior across several PK layouts.

        Creates tables ai_1..ai_8 mixing an autoincrementing integer PK
        column (``int_y``) with server-defaulted, non-autoincrementing PK
        columns, then checks both the reflected ``autoincrement`` flag and
        actual INSERT behavior.
        """
        Table(
            "ai_1",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_2",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_3",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )

        Table(
            "ai_4",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_n2", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_5",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_6",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )
        Table(
            "ai_7",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, autoincrement=True, primary_key=True),
        )
        Table(
            "ai_8",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
        )
        metadata.create_all(connection)

        table_names = [
            "ai_1",
            "ai_2",
            "ai_3",
            "ai_4",
            "ai_5",
            "ai_6",
            "ai_7",
            "ai_8",
        ]
        mr = MetaData()

        for name in table_names:
            # reflect into a scratch MetaData to exercise reflection, then
            # run assertions against the originally-declared table
            tbl = Table(name, mr, autoload_with=connection)
            tbl = metadata.tables[name]

            # test that the flag itself reflects appropriately
            for col in tbl.c:
                if "int_y" in col.name:
                    is_(col.autoincrement, True)
                    is_(tbl._autoincrement_column, col)
                else:
                    eq_(col.autoincrement, "auto")
                    is_not(tbl._autoincrement_column, col)

            eng = [
                engines.testing_engine(options={"implicit_returning": False}),
                engines.testing_engine(options={"implicit_returning": True}),
            ]

            # NOTE(review): 'engine' is never used inside this loop -- every
            # statement runs on 'connection'; the engines only supply the two
            # iterations. Confirm this is intentional.
            for counter, engine in enumerate(eng):
                connection.execute(tbl.insert())
                if "int_y" in tbl.c:
                    eq_(
                        connection.execute(select(tbl.c.int_y)).scalar(),
                        counter + 1,
                    )
                    assert (list(connection.execute(
                        tbl.select()).first()).count(counter + 1) == 1)
                else:
                    assert 1 not in list(
                        connection.execute(tbl.select()).first())
                connection.execute(tbl.delete())
Example #3
0
from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION

from credovi.schema import metadata, schema
from credovi.util.sqlalchemy import Vector3D

# RAW CHAINS
# One row per chain instance in an assembly; presumably rotation/translation
# hold the transform applied to the asymmetric-unit chain -- TODO confirm.
raw_chains = Table('raw_chains', metadata,
                   Column('pdb', String(4), nullable=False),
                   Column('assembly_serial', Integer, nullable=False, autoincrement=False),
                   Column('entity_serial', Integer, nullable=False, autoincrement=False),
                   Column('pdb_chain_id', String(1), nullable=False),
                   Column('pdb_chain_asu_id', String(1), nullable=False),
                   Column('chain_type', String(50)),
                   Column('rotation', ARRAY(DOUBLE_PRECISION)),
                   Column('translation', ARRAY(DOUBLE_PRECISION)),
                   Column('is_at_identity', Boolean(create_constraint=False), DefaultClause('false'), nullable=False),
                   # UNLOGGED table: faster bulk loading, not crash-safe
                   schema=schema, prefixes=['unlogged'])

# Composite PK, deferred so loaders may violate it mid-transaction.
PrimaryKeyConstraint(raw_chains.c.pdb, raw_chains.c.assembly_serial,
                     raw_chains.c.entity_serial, deferrable=True, initially='deferred')

# RAW LIGANDS
raw_ligands = Table('raw_ligands', metadata,
                    Column('pdb', String(4), nullable=False, primary_key=True),
                    Column('assembly_serial', Integer, nullable=False, primary_key=True, autoincrement=False),
                    Column('entity_serial', Integer, nullable=False, autoincrement=False, primary_key=True),
                    Column('pdb_chain_id', String(1), nullable=False),
                    Column('res_num', Integer),
                    Column('ligand_name', String(64), nullable=False),
                    Column('num_hvy_atoms', Integer),
                    Column('ism', Text),
Example #4
0
class TypeCompileTest(fixtures.TestBase, AssertsCompiledSQL):
    __dialect__ = mysql.dialect()

    @testing.combinations(
        # column type, args, kwargs, expected ddl
        # e.g. Column(Integer(10, unsigned=True)) ==
        # 'INTEGER(10) UNSIGNED'
        (mysql.MSNumeric, [], {}, "NUMERIC"),
        (mysql.MSNumeric, [None], {}, "NUMERIC"),
        (mysql.MSNumeric, [12], {}, "NUMERIC(12)"),
        (
            mysql.MSNumeric,
            [12, 4],
            {
                "unsigned": True
            },
            "NUMERIC(12, 4) UNSIGNED",
        ),
        (
            mysql.MSNumeric,
            [12, 4],
            {
                "zerofill": True
            },
            "NUMERIC(12, 4) ZEROFILL",
        ),
        (
            mysql.MSNumeric,
            [12, 4],
            {
                "zerofill": True,
                "unsigned": True
            },
            "NUMERIC(12, 4) UNSIGNED ZEROFILL",
        ),
        (mysql.MSDecimal, [], {}, "DECIMAL"),
        (mysql.MSDecimal, [None], {}, "DECIMAL"),
        (mysql.MSDecimal, [12], {}, "DECIMAL(12)"),
        (mysql.MSDecimal, [12, None], {}, "DECIMAL(12)"),
        (
            mysql.MSDecimal,
            [12, 4],
            {
                "unsigned": True
            },
            "DECIMAL(12, 4) UNSIGNED",
        ),
        (
            mysql.MSDecimal,
            [12, 4],
            {
                "zerofill": True
            },
            "DECIMAL(12, 4) ZEROFILL",
        ),
        (
            mysql.MSDecimal,
            [12, 4],
            {
                "zerofill": True,
                "unsigned": True
            },
            "DECIMAL(12, 4) UNSIGNED ZEROFILL",
        ),
        (mysql.MSDouble, [None, None], {}, "DOUBLE"),
        (
            mysql.MSDouble,
            [12, 4],
            {
                "unsigned": True
            },
            "DOUBLE(12, 4) UNSIGNED",
        ),
        (
            mysql.MSDouble,
            [12, 4],
            {
                "zerofill": True
            },
            "DOUBLE(12, 4) ZEROFILL",
        ),
        (
            mysql.MSDouble,
            [12, 4],
            {
                "zerofill": True,
                "unsigned": True
            },
            "DOUBLE(12, 4) UNSIGNED ZEROFILL",
        ),
        (mysql.MSReal, [None, None], {}, "REAL"),
        (mysql.MSReal, [12, 4], {
            "unsigned": True
        }, "REAL(12, 4) UNSIGNED"),
        (mysql.MSReal, [12, 4], {
            "zerofill": True
        }, "REAL(12, 4) ZEROFILL"),
        (
            mysql.MSReal,
            [12, 4],
            {
                "zerofill": True,
                "unsigned": True
            },
            "REAL(12, 4) UNSIGNED ZEROFILL",
        ),
        (mysql.MSFloat, [], {}, "FLOAT"),
        (mysql.MSFloat, [None], {}, "FLOAT"),
        (mysql.MSFloat, [12], {}, "FLOAT(12)"),
        (mysql.MSFloat, [12, 4], {}, "FLOAT(12, 4)"),
        (mysql.MSFloat, [12, 4], {
            "unsigned": True
        }, "FLOAT(12, 4) UNSIGNED"),
        (mysql.MSFloat, [12, 4], {
            "zerofill": True
        }, "FLOAT(12, 4) ZEROFILL"),
        (
            mysql.MSFloat,
            [12, 4],
            {
                "zerofill": True,
                "unsigned": True
            },
            "FLOAT(12, 4) UNSIGNED ZEROFILL",
        ),
        (mysql.MSInteger, [], {}, "INTEGER"),
        (mysql.MSInteger, [4], {}, "INTEGER(4)"),
        (mysql.MSInteger, [4], {
            "unsigned": True
        }, "INTEGER(4) UNSIGNED"),
        (mysql.MSInteger, [4], {
            "zerofill": True
        }, "INTEGER(4) ZEROFILL"),
        (
            mysql.MSInteger,
            [4],
            {
                "zerofill": True,
                "unsigned": True
            },
            "INTEGER(4) UNSIGNED ZEROFILL",
        ),
        (mysql.MSBigInteger, [], {}, "BIGINT"),
        (mysql.MSBigInteger, [4], {}, "BIGINT(4)"),
        (mysql.MSBigInteger, [4], {
            "unsigned": True
        }, "BIGINT(4) UNSIGNED"),
        (mysql.MSBigInteger, [4], {
            "zerofill": True
        }, "BIGINT(4) ZEROFILL"),
        (
            mysql.MSBigInteger,
            [4],
            {
                "zerofill": True,
                "unsigned": True
            },
            "BIGINT(4) UNSIGNED ZEROFILL",
        ),
        (mysql.MSMediumInteger, [], {}, "MEDIUMINT"),
        (mysql.MSMediumInteger, [4], {}, "MEDIUMINT(4)"),
        (
            mysql.MSMediumInteger,
            [4],
            {
                "unsigned": True
            },
            "MEDIUMINT(4) UNSIGNED",
        ),
        (
            mysql.MSMediumInteger,
            [4],
            {
                "zerofill": True
            },
            "MEDIUMINT(4) ZEROFILL",
        ),
        (
            mysql.MSMediumInteger,
            [4],
            {
                "zerofill": True,
                "unsigned": True
            },
            "MEDIUMINT(4) UNSIGNED ZEROFILL",
        ),
        (mysql.MSTinyInteger, [], {}, "TINYINT"),
        (mysql.MSTinyInteger, [1], {}, "TINYINT(1)"),
        (mysql.MSTinyInteger, [1], {
            "unsigned": True
        }, "TINYINT(1) UNSIGNED"),
        (mysql.MSTinyInteger, [1], {
            "zerofill": True
        }, "TINYINT(1) ZEROFILL"),
        (
            mysql.MSTinyInteger,
            [1],
            {
                "zerofill": True,
                "unsigned": True
            },
            "TINYINT(1) UNSIGNED ZEROFILL",
        ),
        (mysql.MSSmallInteger, [], {}, "SMALLINT"),
        (mysql.MSSmallInteger, [4], {}, "SMALLINT(4)"),
        (
            mysql.MSSmallInteger,
            [4],
            {
                "unsigned": True
            },
            "SMALLINT(4) UNSIGNED",
        ),
        (
            mysql.MSSmallInteger,
            [4],
            {
                "zerofill": True
            },
            "SMALLINT(4) ZEROFILL",
        ),
        (
            mysql.MSSmallInteger,
            [4],
            {
                "zerofill": True,
                "unsigned": True
            },
            "SMALLINT(4) UNSIGNED ZEROFILL",
        ),
    )
    def test_numeric(self, type_, args, kw, res):
        """Exercise type specification and options for numeric types."""

        instance = type_(*args, **kw)
        self.assert_compile(instance, res)
        # repr() must round-trip every constructor argument
        self.assert_compile(eval("mysql.%r" % instance), res)

    @testing.combinations(
        (mysql.MSChar, [1], {}, "CHAR(1)"),
        (mysql.NCHAR, [1], {}, "NATIONAL CHAR(1)"),
        (mysql.MSChar, [1], {
            "binary": True
        }, "CHAR(1) BINARY"),
        (mysql.MSChar, [1], {
            "ascii": True
        }, "CHAR(1) ASCII"),
        (mysql.MSChar, [1], {
            "unicode": True
        }, "CHAR(1) UNICODE"),
        (
            mysql.MSChar,
            [1],
            {
                "ascii": True,
                "binary": True
            },
            "CHAR(1) ASCII BINARY",
        ),
        (
            mysql.MSChar,
            [1],
            {
                "unicode": True,
                "binary": True
            },
            "CHAR(1) UNICODE BINARY",
        ),
        (mysql.MSChar, [1], {
            "charset": "utf8"
        }, "CHAR(1) CHARACTER SET utf8"),
        (
            mysql.MSChar,
            [1],
            {
                "charset": "utf8",
                "binary": True
            },
            "CHAR(1) CHARACTER SET utf8 BINARY",
        ),
        (
            mysql.MSChar,
            [1],
            {
                "charset": "utf8",
                "unicode": True
            },
            "CHAR(1) CHARACTER SET utf8",
        ),
        (
            mysql.MSChar,
            [1],
            {
                "charset": "utf8",
                "ascii": True
            },
            "CHAR(1) CHARACTER SET utf8",
        ),
        (
            mysql.MSChar,
            [1],
            {
                "collation": "utf8_bin"
            },
            "CHAR(1) COLLATE utf8_bin",
        ),
        (
            mysql.MSChar,
            [1],
            {
                "charset": "utf8",
                "collation": "utf8_bin"
            },
            "CHAR(1) CHARACTER SET utf8 COLLATE utf8_bin",
        ),
        (
            mysql.MSChar,
            [1],
            {
                "charset": "utf8",
                "binary": True
            },
            "CHAR(1) CHARACTER SET utf8 BINARY",
        ),
        (
            mysql.MSChar,
            [1],
            {
                "charset": "utf8",
                "collation": "utf8_bin",
                "binary": True
            },
            "CHAR(1) CHARACTER SET utf8 COLLATE utf8_bin",
        ),
        (mysql.MSChar, [1], {
            "national": True
        }, "NATIONAL CHAR(1)"),
        (
            mysql.MSChar,
            [1],
            {
                "national": True,
                "charset": "utf8"
            },
            "NATIONAL CHAR(1)",
        ),
        (
            mysql.MSChar,
            [1],
            {
                "national": True,
                "charset": "utf8",
                "binary": True
            },
            "NATIONAL CHAR(1) BINARY",
        ),
        (
            mysql.MSChar,
            [1],
            {
                "national": True,
                "binary": True,
                "unicode": True
            },
            "NATIONAL CHAR(1) BINARY",
        ),
        (
            mysql.MSChar,
            [1],
            {
                "national": True,
                "collation": "utf8_bin"
            },
            "NATIONAL CHAR(1) COLLATE utf8_bin",
        ),
        (
            mysql.MSString,
            [1],
            {
                "charset": "utf8",
                "collation": "utf8_bin"
            },
            "VARCHAR(1) CHARACTER SET utf8 COLLATE utf8_bin",
        ),
        (
            mysql.MSString,
            [1],
            {
                "national": True,
                "collation": "utf8_bin"
            },
            "NATIONAL VARCHAR(1) COLLATE utf8_bin",
        ),
        (
            mysql.MSTinyText,
            [],
            {
                "charset": "utf8",
                "collation": "utf8_bin"
            },
            "TINYTEXT CHARACTER SET utf8 COLLATE utf8_bin",
        ),
        (
            mysql.MSMediumText,
            [],
            {
                "charset": "utf8",
                "binary": True
            },
            "MEDIUMTEXT CHARACTER SET utf8 BINARY",
        ),
        (mysql.MSLongText, [], {
            "ascii": True
        }, "LONGTEXT ASCII"),
        (
            mysql.ENUM,
            ["foo", "bar"],
            {
                "unicode": True
            },
            """ENUM('foo','bar') UNICODE""",
        ),
        (String, [20], {
            "collation": "utf8"
        }, "VARCHAR(20) COLLATE utf8"),
    )
    @testing.exclude("mysql", "<", (4, 1, 1), "no charset support")
    def test_charset(self, type_, args, kw, res):
        """Exercise CHARACTER SET and COLLATE-ish options on string types."""

        instance = type_(*args, **kw)
        self.assert_compile(instance, res)

    @testing.combinations(
        (mysql.MSBit(), "BIT"),
        (mysql.MSBit(1), "BIT(1)"),
        (mysql.MSBit(63), "BIT(63)"),
    )
    def test_bit_50(self, type_, expected):
        """Exercise BIT types on 5.0+ (not valid for all engine types)"""

        # BIT renders with an optional length argument, up to BIT(63)
        self.assert_compile(type_, expected)

    @testing.combinations(
        (BOOLEAN(), "BOOL"),
        (Boolean(), "BOOL"),
        (mysql.TINYINT(1), "TINYINT(1)"),
        (mysql.TINYINT(1, unsigned=True), "TINYINT(1) UNSIGNED"),
    )
    def test_boolean_compile(self, type_, expected):
        """Boolean-ish types compile to BOOL / TINYINT(1) DDL on MySQL."""
        self.assert_compile(type_, expected)

    def test_timestamp_fsp(self):
        """TIMESTAMP(fsp) renders its fractional-seconds precision."""
        self.assert_compile(mysql.TIMESTAMP(fsp=5), "TIMESTAMP(5)")

    @testing.combinations(
        ([TIMESTAMP], {}, "TIMESTAMP NULL"),
        ([mysql.MSTimeStamp], {}, "TIMESTAMP NULL"),
        (
            [
                mysql.MSTimeStamp(),
                DefaultClause(sql.text("CURRENT_TIMESTAMP")),
            ],
            {},
            "TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP",
        ),
        (
            [mysql.MSTimeStamp,
             DefaultClause(sql.text("CURRENT_TIMESTAMP"))],
            {
                "nullable": False
            },
            "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
        ),
        (
            [
                mysql.MSTimeStamp,
                DefaultClause(sql.text("'1999-09-09 09:09:09'")),
            ],
            {
                "nullable": False
            },
            "TIMESTAMP NOT NULL DEFAULT '1999-09-09 09:09:09'",
        ),
        (
            [
                mysql.MSTimeStamp(),
                DefaultClause(sql.text("'1999-09-09 09:09:09'")),
            ],
            {},
            "TIMESTAMP NULL DEFAULT '1999-09-09 09:09:09'",
        ),
        (
            [
                mysql.MSTimeStamp(),
                DefaultClause(
                    sql.text("'1999-09-09 09:09:09' "
                             "ON UPDATE CURRENT_TIMESTAMP")),
            ],
            {},
            "TIMESTAMP NULL DEFAULT '1999-09-09 09:09:09' "
            "ON UPDATE CURRENT_TIMESTAMP",
        ),
        (
            [
                mysql.MSTimeStamp,
                DefaultClause(
                    sql.text("'1999-09-09 09:09:09' "
                             "ON UPDATE CURRENT_TIMESTAMP")),
            ],
            {
                "nullable": False
            },
            "TIMESTAMP NOT NULL DEFAULT '1999-09-09 09:09:09' "
            "ON UPDATE CURRENT_TIMESTAMP",
        ),
        (
            [
                mysql.MSTimeStamp(),
                DefaultClause(
                    sql.text("CURRENT_TIMESTAMP "
                             "ON UPDATE CURRENT_TIMESTAMP")),
            ],
            {},
            "TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP "
            "ON UPDATE CURRENT_TIMESTAMP",
        ),
        (
            [
                mysql.MSTimeStamp,
                DefaultClause(
                    sql.text("CURRENT_TIMESTAMP "
                             "ON UPDATE CURRENT_TIMESTAMP")),
            ],
            {
                "nullable": False
            },
            "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP "
            "ON UPDATE CURRENT_TIMESTAMP",
        ),
    )
    def test_timestamp_defaults(self, spec, kw, expected):
        """Exercise funky TIMESTAMP default syntax when used in columns."""

        c = Column("t", *spec, **kw)
        # the column must be attached to a Table for CreateColumn to
        # compile its server default / nullability correctly
        Table("t", MetaData(), c)
        self.assert_compile(schema.CreateColumn(c), "t %s" % expected)

    def test_datetime_generic(self):
        """Plain DATETIME compiles without precision."""
        self.assert_compile(mysql.DATETIME(), "DATETIME")

    def test_datetime_fsp(self):
        """DATETIME(fsp) renders its fractional-seconds precision."""
        self.assert_compile(mysql.DATETIME(fsp=4), "DATETIME(4)")

    def test_time_generic(self):
        """Exercise TIME: plain TIME compiles without precision."""

        self.assert_compile(mysql.TIME(), "TIME")

    def test_time_fsp(self):
        """TIME(fsp) renders its fractional-seconds precision."""
        self.assert_compile(mysql.TIME(fsp=5), "TIME(5)")

    def test_time_result_processor(self):
        """TIME's result processor converts timedeltas to time-of-day."""
        # 517 min + 35 s == 8:37:35; microseconds pass through unchanged
        eq_(
            mysql.TIME().result_processor(None, None)(datetime.timedelta(
                seconds=35, minutes=517, microseconds=450)),
            datetime.time(8, 37, 35, 450),
        )
Example #5
0
from sqlalchemy.schema import PrimaryKeyConstraint

from credovi.schema import metadata, schema
from credovi.util.sqlalchemy import PTree, comment_on_table_elements

# One row per biological assembly ("biomolecule") of a structure.
biomolecules = Table(
    'biomolecules',
    metadata,
    Column('biomolecule_id', Integer, nullable=False),
    Column('structure_id', Integer, nullable=False),
    Column('path', PTree, nullable=False),
    Column('assembly_serial', Integer, nullable=False),
    Column('assembly_type', String(32)),  # e.g. monomeric
    # *_bm columns look like bitmask flags (default 0) -- TODO confirm
    Column('conformational_state_bm',
           Integer,
           DefaultClause('0'),
           nullable=False),
    Column('structural_interaction_bm',
           Integer,
           DefaultClause('0'),
           nullable=False),
    # denormalized counts, maintained elsewhere; all default to 0
    Column('num_chains', Integer, DefaultClause('0'), nullable=False),
    Column('num_ligands', Integer, DefaultClause('0'), nullable=False),
    Column('num_atoms', Integer, DefaultClause('0'), nullable=False),
    schema=schema)

# Deferred PK so bulk loaders may violate it mid-transaction.
PrimaryKeyConstraint(biomolecules.c.biomolecule_id,
                     deferrable=True,
                     initially='deferred')
Index('idx_biomolecules_structures_id',
      biomolecules.c.structure_id,
Example #6
0
    def test_autoincrement(self):
        """Round-trip autoincrement behavior across several PK layouts.

        Legacy-style variant: uses the implicitly-bound module-level
        ``metadata`` plus ``MetaData(testing.db)`` / ``autoload=True``
        reflection, and creates per-iteration engines for execution.
        """
        Table(
            "ai_1",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_2",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_3",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )

        Table(
            "ai_4",
            metadata,
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            Column("int_n2", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_5",
            metadata,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
        )
        Table(
            "ai_6",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
        )
        Table(
            "ai_7",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, autoincrement=True, primary_key=True),
        )
        Table(
            "ai_8",
            metadata,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
        )
        metadata.create_all()

        table_names = [
            "ai_1",
            "ai_2",
            "ai_3",
            "ai_4",
            "ai_5",
            "ai_6",
            "ai_7",
            "ai_8",
        ]
        mr = MetaData(testing.db)

        for name in table_names:
            # reflect to exercise reflection, then assert against the
            # originally-declared table
            tbl = Table(name, mr, autoload=True)
            tbl = metadata.tables[name]

            # test that the flag itself reflects appropriately
            for col in tbl.c:
                if "int_y" in col.name:
                    is_(col.autoincrement, True)
                    is_(tbl._autoincrement_column, col)
                else:
                    eq_(col.autoincrement, "auto")
                    is_not_(tbl._autoincrement_column, col)

            # mxodbc can't handle scope_identity() with DEFAULT VALUES

            if testing.db.driver == "mxodbc":
                eng = [
                    engines.testing_engine(
                        options={"implicit_returning": True})
                ]
            else:
                eng = [
                    engines.testing_engine(
                        options={"implicit_returning": False}),
                    engines.testing_engine(
                        options={"implicit_returning": True}),
                ]

            for counter, engine in enumerate(eng):
                with engine.begin() as conn:
                    conn.execute(tbl.insert())
                    if "int_y" in tbl.c:
                        eq_(
                            conn.execute(select([tbl.c.int_y])).scalar(),
                            counter + 1,
                        )
                        assert (list(conn.execute(
                            tbl.select()).first()).count(counter + 1) == 1)
                    else:
                        assert 1 not in list(
                            conn.execute(tbl.select()).first())
                    conn.execute(tbl.delete())
Example #7
0
    def test_autoincrement(self):
        """Verify reflected autoincrement flags and INSERT behavior on
        MySQL MyISAM tables mixing autoincrement (``int_y``) and
        explicitly non-autoincrement (``int_n*``) primary-key columns.
        """
        meta = self.metadata
        Table(
            "ai_1",
            meta,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_2",
            meta,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_3",
            meta,
            Column(
                "int_n",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_4",
            meta,
            Column(
                "int_n",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            Column(
                "int_n2",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_5",
            meta,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column(
                "int_n",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_6",
            meta,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_7",
            meta,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_8",
            meta,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            mysql_engine="MyISAM",
        )
        meta.create_all(testing.db)

        table_names = [
            "ai_1",
            "ai_2",
            "ai_3",
            "ai_4",
            "ai_5",
            "ai_6",
            "ai_7",
            "ai_8",
        ]
        mr = MetaData()
        # reflect all tables back in one pass; assertions below run
        # against the REFLECTED definitions, not the declared ones
        mr.reflect(testing.db, only=table_names)

        with testing.db.begin() as conn:
            for tbl in [mr.tables[name] for name in table_names]:
                for c in tbl.c:
                    if c.name.startswith("int_y"):
                        assert c.autoincrement
                    elif c.name.startswith("int_n"):
                        assert not c.autoincrement
                conn.execute(tbl.insert())
                if "int_y" in tbl.c:
                    # first insert into an autoincrement column yields 1,
                    # and that value appears exactly once in the row
                    assert conn.scalar(select(tbl.c.int_y)) == 1
                    assert (
                        list(conn.execute(tbl.select()).first()).count(1) == 1
                    )
                else:
                    assert 1 not in list(conn.execute(tbl.select()).first())
Example #8
0
def upgrade():
    """Add NOT NULL name/email columns to ``users``.

    Each column carries a server-side empty-string default so the NOT NULL
    constraint can be satisfied for rows that already exist.
    """
    for column_name in ('first_name', 'last_name', 'email'):
        op.add_column(
            'users',
            Column(column_name, sa.String(255), DefaultClause(""),
                   nullable=False),
        )
Example #9
0
 def test_empty_insert_pk3(self):
     """An empty INSERT must raise DBAPIError for this composite-PK table
     (autoincrement integer PK combined with a server-defaulted PK column)."""
     target = Table(
         'c',
         MetaData(testing.db),
         Column('x', Integer, primary_key=True),
         Column('y', Integer, DefaultClause('123'), primary_key=True),
     )
     assert_raises(exc.DBAPIError, self._test_empty_insert, target)
def upgrade():
    """Replace ``user_source.username`` with a NOT NULL ``user_id`` column."""
    op.drop_column('user_source', 'username')
    # DefaultClause(-1) supplies a server default so pre-existing rows can
    # satisfy NOT NULL; presumably -1 marks "no user" -- TODO confirm.
    user_id_column = Column('user_id', sa.Integer, DefaultClause(-1),
                            nullable=False)
    op.add_column('user_source', user_id_column)
def downgrade():
    """Revert the migration: drop ``user_id`` and restore ``username``."""
    op.drop_column('user_source', 'user_id')
    # Empty-string server default lets the NOT NULL column be re-added to a
    # populated table.
    username_column = Column('username', sa.String(255), DefaultClause(""),
                             nullable=False)
    op.add_column('user_source', username_column)
def upgrade():
    """Add a NOT NULL ``url`` column to the ``new`` table."""
    # 2083 characters: a conventional maximum URL length (legacy IE limit).
    url_column = Column('url', sa.String(2083), DefaultClause(""),
                        nullable=False)
    op.add_column('new', url_column)
Example #13
0
# residues master table (won't be empty!)
residues = Table(
    'residues', metadata,
    Column('residue_id', Integer, nullable=False),
    Column('biomolecule_id', Integer, nullable=False),
    Column('chain_id', Integer, nullable=False),
    Column('path', PTree, nullable=False),
    Column('res_name', String(3), nullable=False),
    Column('res_num', Integer, nullable=False),
    Column('ins_code', String(1), nullable=False),  # ' '
    Column('entity_type_bm', Integer, nullable=False),
    Column('is_disordered', Boolean(create_constraint=False),
           DefaultClause('false'), nullable=False),
    Column('is_incomplete', Boolean(create_constraint=False),
           DefaultClause('false'), nullable=False),
    schema=schema)

# Deferrable primary key: constraint checking is postponed until commit.
PrimaryKeyConstraint(residues.c.residue_id,
                     deferrable=True, initially='deferred')
Index('idx_residues_biomolecule_id', residues.c.biomolecule_id)
Index('idx_residues_chain_id',
      residues.c.chain_id, residues.c.res_num, residues.c.ins_code)
# GiST index on the hierarchical path column.
Index('idx_residues_path', residues.c.path, postgresql_using='gist')
Example #14
0
from credovi.schema import metadata, schema
from credovi.util.sqlalchemy import PTree, comment_on_table_elements

# Secondary-structure fragments of protein chains.
prot_fragments = Table(
    'prot_fragments', metadata,
    Column('prot_fragment_id', Integer, nullable=False),
    Column('biomolecule_id', Integer, nullable=False),
    Column('chain_id', Integer, nullable=False),
    Column('path', PTree),
    Column('sstruct_serial', Integer),
    Column('sstruct', String(1)),
    Column('fragment_size', Integer, nullable=False),
    Column('fragment_seq', Text),
    Column('prot_fragment_nterm_id', Integer),
    Column('prot_fragment_cterm_id', Integer),
    Column('completeness', Float(3, 2), DefaultClause('0'), nullable=False),
    schema=schema)

# Deferrable primary key: constraint checking is postponed until commit.
PrimaryKeyConstraint(prot_fragments.c.prot_fragment_id,
                     deferrable=True, initially='deferred')
Index('idx_prot_fragments_biomolecule_id', prot_fragments.c.biomolecule_id)
Index('idx_prot_fragments_chain',
      prot_fragments.c.chain_id, prot_fragments.c.sstruct_serial, unique=True)
# GiST index on the hierarchical path column.
Index('idx_prot_fragments_path', prot_fragments.c.path,
      postgresql_using='gist')
Index('idx_prot_fragments_seq', prot_fragments.c.fragment_seq)

prot_fragment_comments = {
    "table": "Contains information about secondary structure fragments in proteins.",
    "columns":
    {
        "prot_fragment_id": "Primary key of the protein fragment.",
        "biomolecule_id": "Primary key of the biomolecule the secondary structure fragment belongs to.",
        "chain_id": "Primary key of the chain the secondary structure belongs to.",
Example #15
0
    def test_default_reflection(self):
        """Test reflection of column defaults.

        Round-trips a table twice: create -> reflect (``reflected``),
        then re-create from the reflected definition and reflect again
        (``reflected2``), checking server defaults each time.
        """

        from sqlalchemy.dialects.mysql import VARCHAR

        def_table = Table(
            "mysql_def",
            MetaData(testing.db),
            Column(
                "c1",
                VARCHAR(10, collation="utf8_unicode_ci"),
                DefaultClause(""),
                nullable=False,
            ),
            Column("c2", String(10), DefaultClause("0")),
            Column("c3", String(10), DefaultClause("abc")),
            Column("c4", TIMESTAMP, DefaultClause("2009-04-05 12:00:00")),
            Column("c5", TIMESTAMP),
            Column(
                "c6",
                TIMESTAMP,
                DefaultClause(
                    sql.text("CURRENT_TIMESTAMP "
                             "ON UPDATE CURRENT_TIMESTAMP")),
            ),
        )
        def_table.create()
        try:
            reflected = Table("mysql_def", MetaData(testing.db), autoload=True)
        finally:
            def_table.drop()
        assert def_table.c.c1.server_default.arg == ""
        assert def_table.c.c2.server_default.arg == "0"
        assert def_table.c.c3.server_default.arg == "abc"
        assert def_table.c.c4.server_default.arg == "2009-04-05 12:00:00"
        # reflected defaults come back quoted
        assert str(reflected.c.c1.server_default.arg) == "''"
        assert str(reflected.c.c2.server_default.arg) == "'0'"
        assert str(reflected.c.c3.server_default.arg) == "'abc'"
        assert (str(
            reflected.c.c4.server_default.arg) == "'2009-04-05 12:00:00'")
        assert reflected.c.c5.default is None
        assert reflected.c.c5.server_default is None
        assert reflected.c.c6.default is None
        assert re.match(
            r"CURRENT_TIMESTAMP(\(\))? ON UPDATE CURRENT_TIMESTAMP(\(\))?",
            str(reflected.c.c6.server_default.arg).upper(),
        )
        # second round: re-create the table from the reflected definition
        # and reflect it once more.
        reflected.create()
        try:
            reflected2 = Table("mysql_def",
                               MetaData(testing.db),
                               autoload=True)
        finally:
            reflected.drop()
        assert str(reflected2.c.c1.server_default.arg) == "''"
        assert str(reflected2.c.c2.server_default.arg) == "'0'"
        assert str(reflected2.c.c3.server_default.arg) == "'abc'"
        assert (str(
            reflected2.c.c4.server_default.arg) == "'2009-04-05 12:00:00'")
        # BUG FIX: the second round previously re-asserted ``reflected``;
        # it must verify the freshly reflected ``reflected2``.
        assert reflected2.c.c5.default is None
        assert reflected2.c.c5.server_default is None
        assert reflected2.c.c6.default is None
        assert re.match(
            r"CURRENT_TIMESTAMP(\(\))? ON UPDATE CURRENT_TIMESTAMP(\(\))?",
            str(reflected2.c.c6.server_default.arg).upper(),
        )
Example #16
0
class Trip(Base):
    """ORM model for a row of the ``trips`` table."""

    __tablename__ = 'trips'

    # Surrogate primary key, backed by the ``trip_id`` column.
    id = Column('trip_id', Integer, index=True, primary_key=True)


Location = Table(
    'gotorussia_travels_locations',
    Base.metadata,
    Column(
        'id',
        Integer,
        primary_key=True,
        nullable=False,
        server_default=DefaultClause(
            "nextval('gotorussia_travels_locations_id_seq'::regclass)",
            for_update=False),
    ),
    Column('type_id', JSONB),
    Column('object_title', String(255), nullable=False),
    Column('object_place', String(255)),
    Column('object_coord', String(255)),
    Column('object_description', Text, nullable=False),
    Column('adress', Text),
    Column('region_id', Integer),
    Column('local_id', Integer),
    Column(
        'lat',
        DOUBLE_PRECISION(precision=53),
        server_default=DefaultClause('0', for_update=False),
    ),
Example #17
0
    def test_autoincrement(self):
        """Verify AUTO_INCREMENT detection across composite-PK layouts.

        Creates eight MyISAM tables mixing an autoincrementing integer
        PK column (``int_y``) with defaulted non-autoincrementing PK
        columns (``int_n*``, ``o*``), reflects them all, and checks the
        reflected ``autoincrement`` flags and insert behavior.
        """
        meta = MetaData(testing.db)
        try:
            Table(
                "ai_1",
                meta,
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                Column("int_n", Integer, DefaultClause("0"), primary_key=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_2",
                meta,
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                Column("int_n", Integer, DefaultClause("0"), primary_key=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_3",
                meta,
                Column(
                    "int_n",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_4",
                meta,
                Column(
                    "int_n",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                Column(
                    "int_n2",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_5",
                meta,
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                Column(
                    "int_n",
                    Integer,
                    DefaultClause("0"),
                    primary_key=True,
                    autoincrement=False,
                ),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_6",
                meta,
                Column("o1", String(1), DefaultClause("x"), primary_key=True),
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_7",
                meta,
                Column("o1", String(1), DefaultClause("x"), primary_key=True),
                Column("o2", String(1), DefaultClause("x"), primary_key=True),
                Column("int_y", Integer, primary_key=True, autoincrement=True),
                mysql_engine="MyISAM",
            )
            Table(
                "ai_8",
                meta,
                Column("o1", String(1), DefaultClause("x"), primary_key=True),
                Column("o2", String(1), DefaultClause("x"), primary_key=True),
                mysql_engine="MyISAM",
            )
            meta.create_all()

            table_names = [
                "ai_1",
                "ai_2",
                "ai_3",
                "ai_4",
                "ai_5",
                "ai_6",
                "ai_7",
                "ai_8",
            ]
            # Reflect everything back through a fresh bound MetaData.
            mr = MetaData(testing.db)
            mr.reflect(only=table_names)

            for tbl in [mr.tables[name] for name in table_names]:
                # only int_y columns should reflect as autoincrementing
                for c in tbl.c:
                    if c.name.startswith("int_y"):
                        assert c.autoincrement
                    elif c.name.startswith("int_n"):
                        assert not c.autoincrement
                # an all-defaults INSERT should generate 1 for int_y, if any
                tbl.insert().execute()
                if "int_y" in tbl.c:
                    assert select([tbl.c.int_y]).scalar() == 1
                    assert list(tbl.select().execute().first()).count(1) == 1
                else:
                    assert 1 not in list(tbl.select().execute().first())
        finally:
            meta.drop_all()
Example #18
0
class Certificate(db.Model):
    """ORM model for an issued x509 certificate.

    Stores the PEM body/chain/key, metadata parsed from the certificate
    itself (issuer, CN, validity window, ...), and the relationships to
    notifications, destinations, domains, roles and replacements.
    """

    __tablename__ = "certificates"
    __table_args__ = (
        Index(
            "ix_certificates_cn",
            "cn",
            postgresql_ops={"cn": "gin_trgm_ops"},
            postgresql_using="gin",
        ),
        Index(
            "ix_certificates_name",
            "name",
            postgresql_ops={"name": "gin_trgm_ops"},
            postgresql_using="gin",
        ),
    )
    id = Column(Integer, primary_key=True)
    ix = Index("ix_certificates_id_desc",
               id.desc(),
               postgresql_using="btree",
               unique=True)
    external_id = Column(String(128))
    owner = Column(String(128), nullable=False)
    name = Column(String(256), unique=True)
    description = Column(String(1024))
    notify = Column(Boolean, default=True)

    body = Column(Text(), nullable=False)
    chain = Column(Text())
    csr = Column(Text())
    # private key is stored encrypted via the Vault column type
    private_key = Column(Vault)

    issuer = Column(String(128))
    serial = Column(String(128))
    cn = Column(String(128))
    deleted = Column(Boolean, index=True, default=False)
    dns_provider_id = Column(Integer(),
                             ForeignKey("dns_providers.id",
                                        ondelete="CASCADE"),
                             nullable=True)

    not_before = Column(ArrowType)
    not_after = Column(ArrowType)
    not_after_ix = Index("ix_certificates_not_after", not_after.desc())

    date_created = Column(ArrowType, DefaultClause(func.now()), nullable=False)

    signing_algorithm = Column(String(128))
    status = Column(String(128))
    bits = Column(Integer())
    san = Column(String(1024))  # TODO this should be migrated to boolean

    rotation = Column(Boolean, default=False)
    user_id = Column(Integer, ForeignKey("users.id"))
    authority_id = Column(Integer,
                          ForeignKey("authorities.id", ondelete="CASCADE"))
    root_authority_id = Column(
        Integer, ForeignKey("authorities.id", ondelete="CASCADE"))
    rotation_policy_id = Column(Integer, ForeignKey("rotation_policies.id"))
    key_type = Column(String(128))

    notifications = relationship(
        "Notification",
        secondary=certificate_notification_associations,
        backref="certificate",
    )
    destinations = relationship(
        "Destination",
        secondary=certificate_destination_associations,
        backref="certificate",
    )
    sources = relationship("Source",
                           secondary=certificate_source_associations,
                           backref="certificate")
    domains = relationship("Domain",
                           secondary=certificate_associations,
                           backref="certificate")
    roles = relationship("Role",
                         secondary=roles_certificates,
                         backref="certificate")
    replaces = relationship(
        "Certificate",
        secondary=certificate_replacement_associations,
        primaryjoin=id ==
        certificate_replacement_associations.c.certificate_id,  # noqa
        secondaryjoin=id ==
        certificate_replacement_associations.c.replaced_certificate_id,  # noqa
        backref="replaced",
    )

    replaced_by_pending = relationship(
        "PendingCertificate",
        secondary=pending_cert_replacement_associations,
        backref="pending_replace",
    )

    logs = relationship("Log", backref="certificate")
    endpoints = relationship("Endpoint", backref="certificate")
    rotation_policy = relationship("RotationPolicy")
    # fields that must be redacted from serialized output
    sensitive_fields = ("private_key", )

    def __init__(self, **kwargs):
        """Build a Certificate from keyword arguments.

        ``body`` is required; most metadata columns (issuer, cn, san,
        validity window, serial, ...) are derived from the parsed PEM.
        Raises AssertionError via :meth:`check_integrity` when the chain
        or private key does not match the certificate.
        """
        self.body = kwargs["body"].strip()
        cert = self.parsed_cert

        self.issuer = defaults.issuer(cert)
        self.cn = defaults.common_name(cert)
        self.san = defaults.san(cert)
        self.not_before = defaults.not_before(cert)
        self.not_after = defaults.not_after(cert)
        self.serial = defaults.serial(cert)

        # when destinations are appended they require a valid name.
        if kwargs.get("name"):
            self.name = get_or_increase_name(
                defaults.text_to_slug(kwargs["name"]), self.serial)
        else:
            self.name = get_or_increase_name(
                defaults.certificate_name(self.cn, self.issuer,
                                          self.not_before, self.not_after,
                                          self.san),
                self.serial,
            )

        self.owner = kwargs["owner"]

        if kwargs.get("private_key"):
            self.private_key = kwargs["private_key"].strip()

        if kwargs.get("chain"):
            self.chain = kwargs["chain"].strip()

        if kwargs.get("csr"):
            self.csr = kwargs["csr"].strip()

        self.notify = kwargs.get("notify", True)
        self.destinations = kwargs.get("destinations", [])
        self.notifications = kwargs.get("notifications", [])
        self.description = kwargs.get("description")
        self.roles = list(set(kwargs.get("roles", [])))
        self.replaces = kwargs.get("replaces", [])
        self.rotation = kwargs.get("rotation")
        self.rotation_policy = kwargs.get("rotation_policy")
        self.key_type = kwargs.get("key_type")
        self.signing_algorithm = defaults.signing_algorithm(cert)
        self.bits = defaults.bitstrength(cert)
        self.external_id = kwargs.get("external_id")
        self.authority_id = kwargs.get("authority_id")
        self.dns_provider_id = kwargs.get("dns_provider_id")

        for domain in defaults.domains(cert):
            self.domains.append(Domain(name=domain))

        # Check integrity before saving anything into the database.
        # For user-facing API calls, validation should also be done in schema validators.
        self.check_integrity()

    def check_integrity(self):
        """
        Integrity checks: Does the cert have a valid chain and matching private key?
        """
        if self.private_key:
            validators.verify_private_key_match(
                utils.parse_private_key(self.private_key),
                self.parsed_cert,
                error_class=AssertionError,
            )

        if self.chain:
            chain = [self.parsed_cert] + utils.parse_cert_chain(self.chain)
            validators.verify_cert_chain(chain, error_class=AssertionError)

    @cached_property
    def parsed_cert(self):
        """Parse (once) and return the x509 object for ``self.body``."""
        assert self.body, "Certificate body not set"
        return utils.parse_certificate(self.body)

    @property
    def active(self):
        return self.notify

    @property
    def organization(self):
        return defaults.organization(self.parsed_cert)

    @property
    def organizational_unit(self):
        return defaults.organizational_unit(self.parsed_cert)

    @property
    def country(self):
        return defaults.country(self.parsed_cert)

    @property
    def state(self):
        return defaults.state(self.parsed_cert)

    @property
    def location(self):
        return defaults.location(self.parsed_cert)

    @property
    def distinguished_name(self):
        return self.parsed_cert.subject.rfc4514_string()

    """
    # Commenting this property as key_type is now added as a column. This code can be removed in future.
    @property
    def key_type(self):
        if isinstance(self.parsed_cert.public_key(), rsa.RSAPublicKey):
            return "RSA{key_size}".format(
                key_size=self.parsed_cert.public_key().key_size
            )
        elif isinstance(self.parsed_cert.public_key(), ec.EllipticCurvePublicKey):
            return get_key_type_from_ec_curve(self.parsed_cert.public_key().curve.name)
    """

    @property
    def validity_remaining(self):
        return abs(self.not_after - arrow.utcnow())

    @property
    def validity_range(self):
        return self.not_after - self.not_before

    @property
    def subject(self):
        return self.parsed_cert.subject

    @property
    def public_key(self):
        return self.parsed_cert.public_key()

    @hybrid_property
    def expired(self):
        # can't compare offset-naive and offset-aware datetimes
        if arrow.Arrow.fromdatetime(self.not_after) <= arrow.utcnow():
            return True

    @expired.expression
    def expired(cls):
        return case([(cls.not_after <= arrow.utcnow(), True)], else_=False)

    @hybrid_property
    def revoked(self):
        if "revoked" == self.status:
            return True

    @revoked.expression
    def revoked(cls):
        return case([(cls.status == "revoked", True)], else_=False)

    @hybrid_property
    def has_private_key(self):
        return self.private_key is not None

    @has_private_key.expression
    def has_private_key(cls):
        # BUG FIX: previously tested ``is_(None)``, which yielded True at
        # the SQL level exactly when the key was ABSENT -- the inverse of
        # the Python-level property above.
        return case([(cls.private_key.isnot(None), True)], else_=False)

    @hybrid_property
    def in_rotation_window(self):
        """
        Determines if a certificate is available for rotation based
        on the rotation policy associated.
        :return:
        """
        now = arrow.utcnow()
        end = now + timedelta(days=self.rotation_policy.days)

        if self.not_after <= end:
            return True

    @in_rotation_window.expression
    def in_rotation_window(cls):
        """
        Determines if a certificate is available for rotation based
        on the rotation policy associated.
        :return:
        """
        return case(
            [(extract("day", cls.not_after - func.now()) <=
              RotationPolicy.days, True)],
            else_=False,
        )

    @property
    def extensions(self):
        """Map the parsed certificate's x509 extensions into a plain dict."""
        # setup default values
        return_extensions = {"sub_alt_names": {"names": []}}

        try:
            for extension in self.parsed_cert.extensions:
                value = extension.value
                if isinstance(value, x509.BasicConstraints):
                    return_extensions["basic_constraints"] = value

                elif isinstance(value, x509.SubjectAlternativeName):
                    return_extensions["sub_alt_names"]["names"] = value

                elif isinstance(value, x509.ExtendedKeyUsage):
                    return_extensions["extended_key_usage"] = value

                elif isinstance(value, x509.KeyUsage):
                    return_extensions["key_usage"] = value

                elif isinstance(value, x509.SubjectKeyIdentifier):
                    return_extensions["subject_key_identifier"] = {
                        "include_ski": True
                    }

                elif isinstance(value, x509.AuthorityInformationAccess):
                    return_extensions["certificate_info_access"] = {
                        "include_aia": True
                    }

                elif isinstance(value, x509.AuthorityKeyIdentifier):
                    aki = {
                        "use_key_identifier": False,
                        "use_authority_cert": False
                    }

                    if value.key_identifier:
                        aki["use_key_identifier"] = True

                    if value.authority_cert_issuer:
                        aki["use_authority_cert"] = True

                    return_extensions["authority_key_identifier"] = aki

                elif isinstance(value, x509.CRLDistributionPoints):
                    return_extensions["crl_distribution_points"] = {
                        "include_crl_dp": value
                    }

                # TODO: Not supporting custom OIDs yet. https://github.com/Netflix/lemur/issues/665
                else:
                    current_app.logger.warning(
                        "Custom OIDs not yet supported for clone operation.")
        except InvalidCodepoint:
            sentry.captureException()
            current_app.logger.warning(
                "Unable to parse extensions due to underscore in dns name")
        except ValueError as e:
            sentry.captureException()
            current_app.logger.warning("Unable to parse")
            current_app.logger.exception(e)

        return return_extensions

    def __repr__(self):
        return "Certificate(name={name})".format(name=self.name)
Example #19
0
    def test_default_reflection(self):
        """Test reflection of column defaults.

        Round-trips a table twice: create -> reflect (``reflected``),
        then re-create from the reflected definition and reflect again
        (``reflected2``), checking server defaults each time.
        """

        # TODO: this test is a mess.   should be broken into individual
        # combinations

        from sqlalchemy.dialects.mysql import VARCHAR

        def_table = Table(
            "mysql_def",
            MetaData(),
            Column(
                "c1",
                VARCHAR(10, collation="utf8_unicode_ci"),
                DefaultClause(""),
                nullable=False,
            ),
            Column("c2", String(10), DefaultClause("0")),
            Column("c3", String(10), DefaultClause("abc")),
            Column("c4", TIMESTAMP, DefaultClause("2009-04-05 12:00:00")),
            Column("c5", TIMESTAMP),
            Column(
                "c6",
                TIMESTAMP,
                DefaultClause(
                    sql.text(
                        "CURRENT_TIMESTAMP " "ON UPDATE CURRENT_TIMESTAMP"
                    )
                ),
            ),
            Column("c7", mysql.DOUBLE(), DefaultClause("0.0000")),
            Column("c8", mysql.DOUBLE(22, 6), DefaultClause("0.0000")),
        )

        def_table.create(testing.db)
        try:
            reflected = Table(
                "mysql_def", MetaData(), autoload_with=testing.db
            )
        finally:
            def_table.drop(testing.db)
        assert def_table.c.c1.server_default.arg == ""
        assert def_table.c.c2.server_default.arg == "0"
        assert def_table.c.c3.server_default.arg == "abc"
        assert def_table.c.c4.server_default.arg == "2009-04-05 12:00:00"
        # reflected defaults come back quoted
        assert str(reflected.c.c1.server_default.arg) == "''"
        assert str(reflected.c.c2.server_default.arg) == "'0'"
        assert str(reflected.c.c3.server_default.arg) == "'abc'"
        assert (
            str(reflected.c.c4.server_default.arg) == "'2009-04-05 12:00:00'"
        )
        assert reflected.c.c5.default is None
        assert reflected.c.c5.server_default is None
        assert reflected.c.c6.default is None
        assert str(reflected.c.c7.server_default.arg) in ("0", "'0'")

        # this is because the numeric is 6 decimal places, MySQL
        # formats it to that many places.
        assert str(reflected.c.c8.server_default.arg) in (
            "0.000000",
            "'0.000000'",
        )

        assert re.match(
            r"CURRENT_TIMESTAMP(\(\))? ON UPDATE CURRENT_TIMESTAMP(\(\))?",
            str(reflected.c.c6.server_default.arg).upper(),
        )
        # second round: re-create the table from the reflected definition
        # and reflect it once more.
        reflected.create(testing.db)
        try:
            reflected2 = Table(
                "mysql_def", MetaData(), autoload_with=testing.db
            )
        finally:
            reflected.drop(testing.db)
        assert str(reflected2.c.c1.server_default.arg) == "''"
        assert str(reflected2.c.c2.server_default.arg) == "'0'"
        assert str(reflected2.c.c3.server_default.arg) == "'abc'"
        assert (
            str(reflected2.c.c4.server_default.arg) == "'2009-04-05 12:00:00'"
        )
        # BUG FIX: the second round previously re-asserted ``reflected``;
        # it must verify the freshly reflected ``reflected2``.
        assert reflected2.c.c5.default is None
        assert reflected2.c.c5.server_default is None
        assert reflected2.c.c6.default is None
        assert str(reflected2.c.c7.server_default.arg) in ("0", "'0'")
        assert str(reflected2.c.c8.server_default.arg) in (
            "0.000000",
            "'0.000000'",
        )
        assert re.match(
            r"CURRENT_TIMESTAMP(\(\))? ON UPDATE CURRENT_TIMESTAMP(\(\))?",
            str(reflected2.c.c6.server_default.arg).upper(),
        )
Example #20
0
    }
}

# register column/table comments and the partition-routing insert trigger
# for the pi_atoms table
comment_on_table_elements(pi_atoms, pi_atom_comments)
create_partition_insert_trigger(pi_atoms, PI_GROUP_PARTITION_SIZE)

# interactions between pi groups (and rings)
pi_interactions = Table(
    'pi_interactions',
    metadata,
    Column('pi_interaction_id', Integer, primary_key=True),
    Column('biomolecule_id', Integer, nullable=False),
    Column('pi_bgn_id', Integer, nullable=False),
    Column('pi_bgn_is_ring',
           Boolean(create_constraint=False),
           DefaultClause('false'),
           nullable=False),
    Column('pi_end_id', Integer, nullable=False),
    Column('pi_end_is_ring',
           Boolean(create_constraint=False),
           DefaultClause('false'),
           nullable=False),
    # Column('closest_atom_bgn_id', Integer),
    # Column('closest_atom_end_id', Integer),
    Column('distance', Float(3, 2), nullable=False),
    # Column('closest_atom_distance', Float(3,2)),
    Column('dihedral', Float(5, 2), nullable=False),
    Column('theta', Float(5, 2), nullable=False),
    Column('iota', Float(5, 2), nullable=False),
    Column(
        'interaction_type', String(2)
Example #21
0
    def test_autoincrement(self):
        """Verify autoincrement reflection across composite-PK layouts.

        Creates eight tables mixing an autoincrementing ``int_y`` column
        with defaulted PK columns, reflects them, and checks both the
        reflected flag and the insert behavior with and without
        ``implicit_returning``.
        """
        Table('ai_1', metadata,
              Column('int_y', Integer, primary_key=True, autoincrement=True),
              Column('int_n', Integer, DefaultClause('0'), primary_key=True))
        Table('ai_2', metadata,
              Column('int_y', Integer, primary_key=True, autoincrement=True),
              Column('int_n', Integer, DefaultClause('0'), primary_key=True))
        Table('ai_3', metadata,
              Column('int_n', Integer, DefaultClause('0'), primary_key=True),
              Column('int_y', Integer, primary_key=True, autoincrement=True))

        Table('ai_4', metadata,
              Column('int_n', Integer, DefaultClause('0'), primary_key=True),
              Column('int_n2', Integer, DefaultClause('0'), primary_key=True))
        Table('ai_5', metadata,
              Column('int_y', Integer, primary_key=True, autoincrement=True),
              Column('int_n', Integer, DefaultClause('0'), primary_key=True))
        Table('ai_6', metadata,
              Column('o1', String(1), DefaultClause('x'), primary_key=True),
              Column('int_y', Integer, primary_key=True, autoincrement=True))
        Table('ai_7', metadata,
              Column('o1', String(1), DefaultClause('x'), primary_key=True),
              Column('o2', String(1), DefaultClause('x'), primary_key=True),
              Column('int_y', Integer, autoincrement=True, primary_key=True))
        Table('ai_8', metadata,
              Column('o1', String(1), DefaultClause('x'), primary_key=True),
              Column('o2', String(1), DefaultClause('x'), primary_key=True))
        metadata.create_all()

        table_names = [
            'ai_1', 'ai_2', 'ai_3', 'ai_4', 'ai_5', 'ai_6', 'ai_7', 'ai_8'
        ]
        mr = MetaData(testing.db)

        for name in table_names:
            tbl = Table(name, mr, autoload=True)
            tbl = metadata.tables[name]

            # test that the flag itself reflects appropriately
            for col in tbl.c:
                if 'int_y' in col.name:
                    is_(col.autoincrement, True)
                    is_(tbl._autoincrement_column, col)
                else:
                    eq_(col.autoincrement, 'auto')
                    is_not_(tbl._autoincrement_column, col)

            # mxodbc can't handle scope_identity() with DEFAULT VALUES

            if testing.db.driver == 'mxodbc':
                eng = \
                    [engines.testing_engine(options={
                        'implicit_returning': True})]
            else:
                eng = \
                    [engines.testing_engine(options={
                        'implicit_returning': False}),
                     engines.testing_engine(options={
                         'implicit_returning': True})]

            # run an all-defaults INSERT on each engine variant; each insert
            # should bump int_y by one, and that value must appear exactly
            # once in the row
            for counter, engine in enumerate(eng):
                engine.execute(tbl.insert())
                if 'int_y' in tbl.c:
                    assert engine.scalar(select([tbl.c.int_y])) \
                        == counter + 1
                    assert list(
                        engine.execute(tbl.select()).first()).\
                        count(counter + 1) == 1
                else:
                    assert 1 \
                        not in list(engine.execute(tbl.select()).first())
                engine.execute(tbl.delete())
Example #22
0
class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
    """Round-trip reflection tests for the MySQL/MariaDB dialects."""

    # run only against MySQL/MariaDB, and only with a real database backend
    __only_on__ = "mysql", "mariadb"
    __backend__ = True

    @testing.combinations(
        (
            mysql.VARCHAR(10, collation="utf8_unicode_ci"),
            DefaultClause(""),
            "''",
        ),
        (String(10), DefaultClause("abc"), "'abc'"),
        (String(10), DefaultClause("0"), "'0'"),
        (
            TIMESTAMP,
            DefaultClause("2009-04-05 12:00:00"),
            "'2009-04-05 12:00:00'",
        ),
        (TIMESTAMP, None, None),
        (
            TIMESTAMP,
            DefaultClause(
                sql.text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP")),
            re.compile(
                r"CURRENT_TIMESTAMP(\(\))? ON UPDATE CURRENT_TIMESTAMP(\(\))?",
                re.I,
            ),
        ),
        (mysql.DOUBLE(), DefaultClause("0.0000"), "0"),
        (mysql.DOUBLE(22, 6), DefaultClause("0.0000"), "0.000000"),
        (Integer, DefaultClause("1"), "1"),
        (Integer, DefaultClause("-1"), "-1"),
        (mysql.DOUBLE, DefaultClause("-25.03"), "-25.03"),
        (mysql.DOUBLE, DefaultClause("-.001"), "-0.001"),
        argnames="datatype, default, expected",
    )
    def test_default_reflection(self, datatype, default, expected, metadata,
                                connection):
        """Create a one-column table with ``default`` and verify the
        server-reported column default reflects back as ``expected``.

        ``expected`` may be an exact string, ``None``, or a compiled
        regex for cases where the server's rendering varies by version.
        """
        t1 = Table("t1", metadata, Column("x", datatype, default))
        t1.create(connection)
        insp = inspect(connection)

        datatype_inst = types.to_instance(datatype)

        col = insp.get_columns("t1")[0]
        if hasattr(expected, "match"):
            # regex expectation: server output varies across versions
            # (e.g. CURRENT_TIMESTAMP vs. current_timestamp())
            assert expected.match(col["default"])
        elif isinstance(datatype_inst, (Integer, Numeric)):
            # numeric defaults may come back quoted or unquoted depending
            # on the server, so accept optional surrounding quotes
            pattern = re.compile(r"\'?%s\'?" % expected)
            assert pattern.match(col["default"])
        else:
            eq_(col["default"], expected)

    def test_reflection_with_table_options(self, metadata, connection):
        """Table-level options (ENGINE, CHARSET, COMMENT, ...) passed as
        dialect kwargs round-trip through CREATE TABLE and reflection.

        PASSWORD and AUTO_INCREMENT are deliberately not asserted on the
        reflected table; see the trailing comments in each branch.
        """
        comment = r"""Comment types type speedily ' " \ '' Fun!"""
        if testing.against("mariadb"):
            kwargs = dict(
                mariadb_engine="MEMORY",
                mariadb_default_charset="utf8",
                mariadb_auto_increment="5",
                mariadb_avg_row_length="3",
                # must match the mariadb_password assertion below; the
                # original had a scrubbed "******" placeholder here which
                # made that assertion fail
                mariadb_password="secret",
                mariadb_connection="fish",
            )
        else:
            kwargs = dict(
                mysql_engine="MEMORY",
                mysql_default_charset="utf8",
                mysql_auto_increment="5",
                mysql_avg_row_length="3",
                # must match the mysql_password assertion below (see note
                # in the mariadb branch)
                mysql_password="secret",
                mysql_connection="fish",
            )

        def_table = Table("mysql_def",
                          metadata,
                          Column("c1", Integer()),
                          comment=comment,
                          **kwargs)

        conn = connection
        def_table.create(conn)
        reflected = Table("mysql_def", MetaData(), autoload_with=conn)

        if testing.against("mariadb"):
            # the declared Table keeps the kwargs verbatim
            assert def_table.kwargs["mariadb_engine"] == "MEMORY"
            assert def_table.comment == comment
            assert def_table.kwargs["mariadb_default_charset"] == "utf8"
            assert def_table.kwargs["mariadb_auto_increment"] == "5"
            assert def_table.kwargs["mariadb_avg_row_length"] == "3"
            assert def_table.kwargs["mariadb_password"] == "secret"
            assert def_table.kwargs["mariadb_connection"] == "fish"

            assert reflected.kwargs["mariadb_engine"] == "MEMORY"

            assert reflected.comment == comment
            assert reflected.kwargs["mariadb_comment"] == comment
            # note: the server reports this option as "default charset"
            # (with a space), not "default_charset"
            assert reflected.kwargs["mariadb_default charset"] == "utf8"
            assert reflected.kwargs["mariadb_avg_row_length"] == "3"
            assert reflected.kwargs["mariadb_connection"] == "fish"

            # This field doesn't seem to be returned by mariadb itself.
            # assert reflected.kwargs['mariadb_password'] == 'secret'

            # This is explicitly ignored when reflecting schema.
            # assert reflected.kwargs['mariadb_auto_increment'] == '5'
        else:
            assert def_table.kwargs["mysql_engine"] == "MEMORY"
            assert def_table.comment == comment
            assert def_table.kwargs["mysql_default_charset"] == "utf8"
            assert def_table.kwargs["mysql_auto_increment"] == "5"
            assert def_table.kwargs["mysql_avg_row_length"] == "3"
            assert def_table.kwargs["mysql_password"] == "secret"
            assert def_table.kwargs["mysql_connection"] == "fish"

            assert reflected.kwargs["mysql_engine"] == "MEMORY"

            assert reflected.comment == comment
            assert reflected.kwargs["mysql_comment"] == comment
            # server reports "default charset" (with a space) on reflection
            assert reflected.kwargs["mysql_default charset"] == "utf8"
            assert reflected.kwargs["mysql_avg_row_length"] == "3"
            assert reflected.kwargs["mysql_connection"] == "fish"

            # This field doesn't seem to be returned by mysql itself.
            # assert reflected.kwargs['mysql_password'] == 'secret'

            # This is explicitly ignored when reflecting schema.
            # assert reflected.kwargs['mysql_auto_increment'] == '5'

    def test_reflection_on_include_columns(self, metadata, connection):
        """Test reflection of include_columns to be sure they respect case."""

        case_table = Table(
            "mysql_case",
            metadata,
            Column("c1", String(10)),
            Column("C2", String(10)),
            Column("C3", String(10)),
        )
        case_table.create(connection)

        # exact-case names: both requested columns come back
        reflected = Table(
            "mysql_case",
            MetaData(),
            autoload_with=connection,
            include_columns=["c1", "C2"],
        )
        for tbl in (case_table, reflected):
            keys = tbl.c.keys()
            assert "c1" in keys
            assert "C2" in keys

        # wrong case for C2: only the exact-case match is reflected
        reflected2 = Table(
            "mysql_case",
            MetaData(),
            autoload_with=connection,
            include_columns=["c1", "c2"],
        )
        keys2 = reflected2.c.keys()
        assert "c1" in keys2
        for missing in ("c2", "C2", "C3"):
            assert missing not in keys2

    def test_autoincrement(self, metadata, connection):
        """AUTO_INCREMENT detection during reflection.

        Columns named ``int_y`` are expected to reflect as
        autoincrementing; ``int_n*`` columns are not.  Each ``ai_*`` table
        exercises a different primary-key layout (single vs. composite,
        integer vs. string leading columns).
        """
        meta = metadata
        # NOTE(review): MyISAM appears to be used because it permits
        # AUTO_INCREMENT on a non-leading column of a composite key,
        # which InnoDB rejects — confirm
        Table(
            "ai_1",
            meta,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_2",
            meta,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column("int_n", Integer, DefaultClause("0"), primary_key=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_3",
            meta,
            Column(
                "int_n",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_4",
            meta,
            Column(
                "int_n",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            Column(
                "int_n2",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_5",
            meta,
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            Column(
                "int_n",
                Integer,
                DefaultClause("0"),
                primary_key=True,
                autoincrement=False,
            ),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_6",
            meta,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_7",
            meta,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            Column("int_y", Integer, primary_key=True, autoincrement=True),
            mysql_engine="MyISAM",
        )
        Table(
            "ai_8",
            meta,
            Column("o1", String(1), DefaultClause("x"), primary_key=True),
            Column("o2", String(1), DefaultClause("x"), primary_key=True),
            mysql_engine="MyISAM",
        )
        meta.create_all(connection)

        table_names = [
            "ai_1",
            "ai_2",
            "ai_3",
            "ai_4",
            "ai_5",
            "ai_6",
            "ai_7",
            "ai_8",
        ]
        # reflect everything into a fresh MetaData and check the flags
        mr = MetaData()
        mr.reflect(connection, only=table_names)

        for tbl in [mr.tables[name] for name in table_names]:
            for c in tbl.c:
                if c.name.startswith("int_y"):
                    assert c.autoincrement
                elif c.name.startswith("int_n"):
                    assert not c.autoincrement
            # a bare INSERT should populate the autoincrement column with 1,
            # and exactly one column of the row should hold that value
            connection.execute(tbl.insert())
            if "int_y" in tbl.c:
                assert connection.scalar(select(tbl.c.int_y)) == 1
                assert (list(connection.execute(
                    tbl.select()).first()).count(1) == 1)
            else:
                assert 1 not in list(connection.execute(tbl.select()).first())

    def test_view_reflection(self, metadata, connection):
        """Columns of views (plain, MERGE, UNDEFINED, and DEFINER variants)
        reflect with the same names/types as the underlying table."""
        Table("x", metadata, Column("a", Integer), Column("b", String(50)))
        metadata.create_all(connection)

        conn = connection
        conn.exec_driver_sql("CREATE VIEW v1 AS SELECT * FROM x")
        conn.exec_driver_sql(
            "CREATE ALGORITHM=MERGE VIEW v2 AS SELECT * FROM x")
        conn.exec_driver_sql(
            "CREATE ALGORITHM=UNDEFINED VIEW v3 AS SELECT * FROM x")
        conn.exec_driver_sql(
            "CREATE DEFINER=CURRENT_USER VIEW v4 AS SELECT * FROM x")

        # the views were created with raw DDL outside of ``metadata``, so
        # drop them explicitly before the fixture drops table "x"
        @event.listens_for(metadata, "before_drop")
        def cleanup(*arg, **kw):
            with testing.db.begin() as conn:
                for v in ["v1", "v2", "v3", "v4"]:
                    conn.exec_driver_sql("DROP VIEW %s" % v)

        insp = inspect(connection)
        for v in ["v1", "v2", "v3", "v4"]:
            eq_(
                [(col["name"], col["type"].__class__)
                 for col in insp.get_columns(v)],
                [("a", mysql.INTEGER), ("b", mysql.VARCHAR)],
            )

    def test_skip_not_describable(self, metadata, connection):
        """A view whose base table was dropped cannot be described;
        ``MetaData.reflect()`` warns and skips it, while direct ``Table``
        autoload raises ``UnreflectableTableError``."""
        # objects are created with raw DDL, so clean them up explicitly
        @event.listens_for(metadata, "before_drop")
        def cleanup(*arg, **kw):
            with testing.db.begin() as conn:
                conn.exec_driver_sql("DROP TABLE IF EXISTS test_t1")
                conn.exec_driver_sql("DROP TABLE IF EXISTS test_t2")
                conn.exec_driver_sql("DROP VIEW IF EXISTS test_v")

        conn = connection
        conn.exec_driver_sql("CREATE TABLE test_t1 (id INTEGER)")
        conn.exec_driver_sql("CREATE TABLE test_t2 (id INTEGER)")
        conn.exec_driver_sql("CREATE VIEW test_v AS SELECT id FROM test_t1")
        # dropping the base table leaves test_v referencing an invalid table
        conn.exec_driver_sql("DROP TABLE test_t1")

        m = MetaData()
        with expect_warnings(
                "Skipping .* Table or view named .?test_v.? could not be "
                "reflected: .* references invalid table"):
            m.reflect(views=True, bind=conn)
        # the healthy table was still reflected
        eq_(m.tables["test_t2"].name, "test_t2")

        assert_raises_message(
            exc.UnreflectableTableError,
            "references invalid table",
            Table,
            "test_v",
            MetaData(),
            autoload_with=conn,
        )

    @testing.exclude("mysql", "<", (5, 0, 0), "no information_schema support")
    def test_system_views(self):
        """information_schema system views appear in get_view_names()."""
        dialect = testing.db.dialect
        # use a context manager so the connection is always released
        # (the original leaked it by never calling close())
        with testing.db.connect() as connection:
            view_names = dialect.get_view_names(connection,
                                                "information_schema")
        self.assert_("TABLES" in view_names)

    def test_nullable_reflection(self, metadata, connection):
        """test reflection of NULL/NOT NULL, in particular with TIMESTAMP
        defaults where MySQL is inconsistent in how it reports CREATE TABLE.

        """
        meta = metadata

        # this is ideally one table, but older MySQL versions choke
        # on the multiple TIMESTAMP columns

        # TIMESTAMP NULL/DEFAULT behavior flips on this server variable,
        # so detect its setting up front and branch the expectations on it
        row = connection.exec_driver_sql(
            "show variables like '%%explicit_defaults_for_timestamp%%'").first(
            )
        explicit_defaults_for_timestamp = row[1].lower() in ("on", "1", "true")

        reflected = []
        for idx, cols in enumerate([
            [
                "x INTEGER NULL",
                "y INTEGER NOT NULL",
                "z INTEGER",
                "q TIMESTAMP NULL",
            ],
            ["p TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP"],
            ["r TIMESTAMP NOT NULL"],
            ["s TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP"],
            ["t TIMESTAMP"],
            ["u TIMESTAMP DEFAULT CURRENT_TIMESTAMP"],
        ]):
            Table("nn_t%d" % idx, meta)  # to allow DROP

            # create with raw DDL so the exact NULL/DEFAULT phrasing is
            # under test control
            connection.exec_driver_sql("""
                    CREATE TABLE nn_t%d (
                        %s
                    )
                """ % (idx, ", \n".join(cols)))

            # collect only the fields under test from each reflected column
            reflected.extend({
                "name": d["name"],
                "nullable": d["nullable"],
                "default": d["default"],
            } for d in inspect(connection).get_columns("nn_t%d" % idx))

        # MariaDB 10.2+ renders the function lower-cased with parens
        if connection.dialect._is_mariadb_102:
            current_timestamp = "current_timestamp()"
        else:
            current_timestamp = "CURRENT_TIMESTAMP"

        eq_(
            reflected,
            [
                {
                    "name": "x",
                    "nullable": True,
                    "default": None
                },
                {
                    "name": "y",
                    "nullable": False,
                    "default": None
                },
                {
                    "name": "z",
                    "nullable": True,
                    "default": None
                },
                {
                    "name": "q",
                    "nullable": True,
                    "default": None
                },
                {
                    "name": "p",
                    "nullable": True,
                    "default": current_timestamp
                },
                {
                    "name": "r",
                    "nullable": False,
                    "default": None if explicit_defaults_for_timestamp else
                    ("%(current_timestamp)s "
                     "ON UPDATE %(current_timestamp)s") % {
                         "current_timestamp": current_timestamp
                     },
                },
                {
                    "name": "s",
                    "nullable": False,
                    "default": current_timestamp
                },
                {
                    "name": "t",
                    "nullable":
                    True if explicit_defaults_for_timestamp else False,
                    "default": None if explicit_defaults_for_timestamp else
                    ("%(current_timestamp)s "
                     "ON UPDATE %(current_timestamp)s") % {
                         "current_timestamp": current_timestamp
                     },
                },
                {
                    "name": "u",
                    "nullable":
                    True if explicit_defaults_for_timestamp else False,
                    "default": current_timestamp,
                },
            ],
        )

    def test_reflection_with_unique_constraint(self, metadata, connection):
        """MySQL stores a UNIQUE constraint as a unique index: the
        inspector reports it under both get_indexes() and
        get_unique_constraints(), while full Table reflection keeps only
        the index form."""
        insp = inspect(connection)

        meta = metadata
        uc_table = Table(
            "mysql_uc",
            meta,
            Column("a", String(10)),
            UniqueConstraint("a", name="uc_a"),
        )

        uc_table.create(connection)

        # MySQL converts unique constraints into unique indexes.
        # separately we get both
        indexes = dict((i["name"], i) for i in insp.get_indexes("mysql_uc"))
        constraints = set(i["name"]
                          for i in insp.get_unique_constraints("mysql_uc"))

        self.assert_("uc_a" in indexes)
        self.assert_(indexes["uc_a"]["unique"])
        self.assert_("uc_a" in constraints)

        # reflection here favors the unique index, as that's the
        # more "official" MySQL construct.
        # reflect via the connection fixture, consistent with the rest of
        # this class (was autoload_with=testing.db)
        reflected = Table("mysql_uc", MetaData(), autoload_with=connection)

        indexes = dict((i.name, i) for i in reflected.indexes)
        constraints = set(uc.name for uc in reflected.constraints)

        self.assert_("uc_a" in indexes)
        self.assert_(indexes["uc_a"].unique)
        self.assert_("uc_a" not in constraints)

    def test_reflect_fulltext(self, metadata, connection):
        """A FULLTEXT index reflects its prefix option and compiles back
        to CREATE FULLTEXT INDEX."""
        mt = Table(
            "mytable",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("textdata", String(50)),
            mariadb_engine="InnoDB",
            mysql_engine="InnoDB",
        )

        # the FULLTEXT prefix is given for both dialect names so the test
        # runs against either backend
        Index(
            "textdata_ix",
            mt.c.textdata,
            mysql_prefix="FULLTEXT",
            mariadb_prefix="FULLTEXT",
        )
        metadata.create_all(connection)

        # NOTE(review): reflection here goes through testing.db rather than
        # the ``connection`` fixture used elsewhere in this class — confirm
        # whether that is intentional
        mt = Table("mytable", MetaData(), autoload_with=testing.db)
        idx = list(mt.indexes)[0]
        eq_(idx.name, "textdata_ix")
        eq_(idx.dialect_options[testing.db.name]["prefix"], "FULLTEXT")
        self.assert_compile(
            CreateIndex(idx),
            "CREATE FULLTEXT INDEX textdata_ix ON mytable (textdata)",
        )

    @testing.requires.mysql_ngram_fulltext
    def test_reflect_fulltext_comment(
        self,
        metadata,
        connection,
    ):
        """A FULLTEXT index with a WITH PARSER clause reflects both the
        prefix and parser options and compiles back to the same DDL."""
        tbl = Table(
            "mytable",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("textdata", String(50)),
            mysql_engine="InnoDB",
        )
        Index(
            "textdata_ix",
            tbl.c.textdata,
            mysql_prefix="FULLTEXT",
            mysql_with_parser="ngram",
        )
        metadata.create_all(connection)

        reflected = Table("mytable", MetaData(), autoload_with=connection)
        reflected_ix = list(reflected.indexes)[0]
        eq_(reflected_ix.name, "textdata_ix")
        eq_(reflected_ix.dialect_options["mysql"]["prefix"], "FULLTEXT")
        eq_(reflected_ix.dialect_options["mysql"]["with_parser"], "ngram")
        self.assert_compile(
            CreateIndex(reflected_ix),
            "CREATE FULLTEXT INDEX textdata_ix ON mytable "
            "(textdata) WITH PARSER ngram",
        )

    def test_non_column_index(self, metadata, connection):
        """An index built on a column expression (x.desc()) reflects back
        as a plain index over the underlying column."""
        tbl = Table("add_ix",
                    metadata,
                    Column("x", String(50)),
                    mysql_engine="InnoDB")
        Index("foo_idx", tbl.c.x.desc())
        metadata.create_all(connection)

        expected = [{
            "name": "foo_idx",
            "column_names": ["x"],
            "unique": False
        }]
        eq_(inspect(connection).get_indexes("add_ix"), expected)

    def _bug_88718_96365_casing_0(self):
        """Fixture data: miscased FK rows plus information_schema rows as
        they look with lower_case_table_names=0."""

        def fk(name, constrained, schema):
            # each call builds a fresh dict (including a fresh options {})
            return {
                "name": name,
                "constrained_columns": constrained,
                "referred_schema": schema,
                "referred_table": "Track",
                "referred_columns": ["trackid"],
                "options": {},
            }

        fkeys_casing_0 = [
            fk("FK_PlaylistTTrackId", ["TTrackID"], "Test_Schema"),
            fk("FK_PlaylistTrackId", ["TrackID"], None),
        ]
        ischema_casing_0 = [
            ("Test", "Track", "TrackID"),
            ("Test_Schema", "Track", "TrackID"),
        ]
        return fkeys_casing_0, ischema_casing_0

    def _bug_88718_96365_casing_1(self):
        """Fixture data: miscased FK rows plus information_schema rows as
        they look with lower_case_table_names=1.

        The ischema strings were previously wrapped in ``util.u()``, a
        Python-2 compatibility shim that is the identity function on
        Python 3 (and removed in SQLAlchemy 2.0); plain literals are
        equivalent here.
        """
        fkeys_casing_1 = [
            {
                "name": "FK_PlaylistTTrackId",
                "constrained_columns": ["TTrackID"],
                "referred_schema": "Test_Schema",
                "referred_table": "Track",
                "referred_columns": ["trackid"],
                "options": {},
            },
            {
                "name": "FK_PlaylistTrackId",
                "constrained_columns": ["TrackID"],
                "referred_schema": None,
                "referred_table": "Track",
                "referred_columns": ["trackid"],
                "options": {},
            },
        ]
        ischema_casing_1 = [
            ("Test", "Track", "TrackID"),
            ("Test_Schema", "Track", "TrackID"),
        ]
        return fkeys_casing_1, ischema_casing_1

    def _bug_88718_96365_casing_2(self):
        """Fixture data: miscased FK rows plus information_schema rows as
        they look with lower_case_table_names=2."""

        def fk(name, constrained, schema, table):
            # fresh dict per call, including a fresh options {}
            return {
                "name": name,
                "constrained_columns": constrained,
                "referred_schema": schema,
                "referred_table": table,
                "referred_columns": ["trackid"],
                "options": {},
            }

        # with casing=2 the referred table name comes back lower-cased
        # (see https://bugs.mysql.com/bug.php?id=96365, an 8.0 regression);
        # the schema name hasn't been tested but since col/table both do
        # it, assume it also comes back wrong
        fkeys_casing_2 = [
            fk("FK_PlaylistTTrackId", ["TTrackID"], "test_schema", "track"),
            fk("FK_PlaylistTrackId", ["TrackID"], None, "track"),
        ]
        ischema_casing_2 = [
            ("Test", "Track", "TrackID"),
            ("Test_Schema", "Track", "TrackID"),
        ]
        return fkeys_casing_2, ischema_casing_2

    def test_correct_for_mysql_bugs_88718_96365(self):
        """Unit-test the dialect's workaround for MySQL bugs #88718/#96365:
        given miscased foreign-key rows plus canned information_schema
        rows, the referred schema/table/column casing is corrected in
        place for each lower_case_table_names mode."""
        dialect = mysql.dialect()

        for casing, (fkeys, ischema) in [
            (0, self._bug_88718_96365_casing_0()),
            (1, self._bug_88718_96365_casing_1()),
            (2, self._bug_88718_96365_casing_2()),
        ]:
            dialect._casing = casing
            dialect.default_schema_name = "Test"
            # the mock connection answers any query with the canned
            # information_schema rows for this casing mode
            connection = mock.Mock(dialect=dialect,
                                   execute=lambda stmt, params: ischema)
            dialect._correct_for_mysql_bugs_88718_96365(fkeys, connection)
            # fkeys is mutated in place; referred names regain true casing
            eq_(
                fkeys,
                [
                    {
                        "name": "FK_PlaylistTTrackId",
                        "constrained_columns": ["TTrackID"],
                        "referred_schema": "Test_Schema",
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                    {
                        "name": "FK_PlaylistTrackId",
                        "constrained_columns": ["TrackID"],
                        "referred_schema": None,
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                ],
            )

    def test_case_sensitive_column_constraint_reflection(
            self, metadata, connection):
        """Foreign keys across same-named, differently-cased tables reflect
        with correct casing on both case-insensitive and case-sensitive
        servers."""
        # test for issue #4344 which works around
        # MySQL 8.0 bug https://bugs.mysql.com/bug.php?id=88718

        m1 = metadata

        Table(
            "Track",
            m1,
            Column("TrackID", Integer, primary_key=True),
            mysql_engine="InnoDB",
        )
        # same table name in the secondary test schema
        Table(
            "Track",
            m1,
            Column("TrackID", Integer, primary_key=True),
            schema=testing.config.test_schema,
            mysql_engine="InnoDB",
        )
        Table(
            "PlaylistTrack",
            m1,
            Column("id", Integer, primary_key=True),
            Column(
                "TrackID",
                ForeignKey("Track.TrackID", name="FK_PlaylistTrackId"),
            ),
            Column(
                "TTrackID",
                ForeignKey(
                    "%s.Track.TrackID" % (testing.config.test_schema, ),
                    name="FK_PlaylistTTrackId",
                ),
            ),
            mysql_engine="InnoDB",
        )
        m1.create_all(connection)

        if connection.dialect._casing in (1, 2):
            # the original test for the 88718 fix here in [ticket:4344]
            # actually set  referred_table='track', with the wrong casing!
            # this test was never run. with [ticket:4751], I've gone through
            # the trouble to create docker containers with true
            # lower_case_table_names=2 and per
            # https://bugs.mysql.com/bug.php?id=96365 the table name being
            # lower case is also an 8.0 regression.

            eq_(
                inspect(connection).get_foreign_keys("PlaylistTrack"),
                [
                    {
                        "name": "FK_PlaylistTTrackId",
                        "constrained_columns": ["TTrackID"],
                        "referred_schema": testing.config.test_schema,
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                    {
                        "name": "FK_PlaylistTrackId",
                        "constrained_columns": ["TrackID"],
                        "referred_schema": None,
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                ],
            )
        else:
            # case-sensitive server: ordering isn't guaranteed, so sort by
            # constraint name before comparing
            eq_(
                sorted(
                    inspect(connection).get_foreign_keys("PlaylistTrack"),
                    key=lambda elem: elem["name"],
                ),
                [
                    {
                        "name": "FK_PlaylistTTrackId",
                        "constrained_columns": ["TTrackID"],
                        "referred_schema": testing.config.test_schema,
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                    {
                        "name": "FK_PlaylistTrackId",
                        "constrained_columns": ["TrackID"],
                        "referred_schema": None,
                        "referred_table": "Track",
                        "referred_columns": ["TrackID"],
                        "options": {},
                    },
                ],
            )

    def test_get_foreign_key_name_w_foreign_key_in_name(
            self, metadata, connection):
        """An explicitly named FK constraint reflects back under its given
        name, or as the server-generated "b_ibfk_1" on backends that
        cannot reflect constraint names."""
        Table(
            "a",
            metadata,
            Column("id", Integer, primary_key=True),
            mysql_engine="InnoDB",
        )

        cons = ForeignKeyConstraint(["aid"], ["a.id"],
                                    name="foreign_key_thing_with_stuff")
        Table(
            "b",
            metadata,
            Column("id", Integer, primary_key=True),
            # no explicit type on "aid": presumably derived from the
            # referenced a.id column via the FK constraint — TODO confirm
            Column("aid", ),
            cons,
            mysql_engine="InnoDB",
        )
        actual_name = cons.name

        metadata.create_all(connection)

        if testing.requires.foreign_key_constraint_name_reflection.enabled:
            expected_name = actual_name
        else:
            # server-generated default name for the first FK on table "b"
            expected_name = "b_ibfk_1"

        eq_(
            inspect(connection).get_foreign_keys("b"),
            [{
                "name": expected_name,
                "constrained_columns": ["aid"],
                "referred_schema": None,
                "referred_table": "a",
                "referred_columns": ["id"],
                "options": {},
            }],
        )

    @testing.requires.mysql_fully_case_sensitive
    def test_case_sensitive_reflection_dual_case_references(
            self, metadata, connection):
        """On a fully case-sensitive backend, FKs to "t1" and "T1" reflect
        to their distinct targets without case folding."""
        # this tests that within the fix we do for MySQL bug
        # 88718, we don't do case-insensitive logic if the backend
        # is case sensitive
        m = metadata
        Table(
            "t1",
            m,
            Column("some_id", Integer, primary_key=True),
            mysql_engine="InnoDB",
        )

        # same name, different case: a distinct table on this backend
        Table(
            "T1",
            m,
            Column("Some_Id", Integer, primary_key=True),
            mysql_engine="InnoDB",
        )

        Table(
            "t2",
            m,
            Column("id", Integer, primary_key=True),
            Column("t1id", ForeignKey("t1.some_id", name="t1id_fk")),
            Column("cap_t1id", ForeignKey("T1.Some_Id", name="cap_t1id_fk")),
            mysql_engine="InnoDB",
        )
        m.create_all(connection)

        # key the result on constraint name so ordering doesn't matter
        eq_(
            dict((rec["name"], rec)
                 for rec in inspect(connection).get_foreign_keys("t2")),
            {
                "cap_t1id_fk": {
                    "name": "cap_t1id_fk",
                    "constrained_columns": ["cap_t1id"],
                    "referred_schema": None,
                    "referred_table": "T1",
                    "referred_columns": ["Some_Id"],
                    "options": {},
                },
                "t1id_fk": {
                    "name": "t1id_fk",
                    "constrained_columns": ["t1id"],
                    "referred_schema": None,
                    "referred_table": "t1",
                    "referred_columns": ["some_id"],
                    "options": {},
                },
            },
        )
Example #23
0
from sqlalchemy.dialects.postgresql import ARRAY, REAL

from credovi.schema import metadata, schema
from credovi.util.sqlalchemy import comment_on_table_elements

# Summary table of external domain entries; the num_* columns are counters
# that default to zero server-side via DefaultClause('0').
domains = Table(
    'domains', metadata,
    Column('domain_id', Integer, autoincrement=True, nullable=False),
    Column('db_source', String(12), nullable=False),
    Column('db_accession_id', String(16), nullable=False),
    Column('num_chains', Integer, DefaultClause('0'), nullable=False),
    Column('num_binding_sites', Integer, DefaultClause('0'), nullable=False),
    Column('num_bs_with_drug_likes', Integer, DefaultClause('0'),
           nullable=False),
    Column('num_bs_with_fragments', Integer, DefaultClause('0'),
           nullable=False),
    Column('description', Text),
    schema=schema)
Example #24
0
    def test_timestamp_defaults(self):
        """Exercise funky TIMESTAMP default syntax when used in columns."""

        # Each spec is a 3-tuple:
        #   (positional Column args, Column keyword args, expected DDL).
        # Both the MSTimeStamp instance and the bare class are covered,
        # with and without explicit server defaults / ON UPDATE clauses.
        columns = [
            # bare TIMESTAMP renders as nullable ("TIMESTAMP NULL")
            ([TIMESTAMP], {}, "TIMESTAMP NULL"),
            ([mysql.MSTimeStamp], {}, "TIMESTAMP NULL"),
            (
                [
                    mysql.MSTimeStamp(),
                    DefaultClause(sql.text("CURRENT_TIMESTAMP")),
                ],
                {},
                "TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP",
            ),
            (
                [
                    mysql.MSTimeStamp,
                    DefaultClause(sql.text("CURRENT_TIMESTAMP")),
                ],
                {"nullable": False},
                "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP",
            ),
            # literal datetime defaults must be passed through verbatim
            (
                [
                    mysql.MSTimeStamp,
                    DefaultClause(sql.text("'1999-09-09 09:09:09'")),
                ],
                {"nullable": False},
                "TIMESTAMP NOT NULL DEFAULT '1999-09-09 09:09:09'",
            ),
            (
                [
                    mysql.MSTimeStamp(),
                    DefaultClause(sql.text("'1999-09-09 09:09:09'")),
                ],
                {},
                "TIMESTAMP NULL DEFAULT '1999-09-09 09:09:09'",
            ),
            # combined DEFAULT + ON UPDATE clauses inside one DefaultClause
            (
                [
                    mysql.MSTimeStamp(),
                    DefaultClause(
                        sql.text(
                            "'1999-09-09 09:09:09' "
                            "ON UPDATE CURRENT_TIMESTAMP"
                        )
                    ),
                ],
                {},
                "TIMESTAMP NULL DEFAULT '1999-09-09 09:09:09' "
                "ON UPDATE CURRENT_TIMESTAMP",
            ),
            (
                [
                    mysql.MSTimeStamp,
                    DefaultClause(
                        sql.text(
                            "'1999-09-09 09:09:09' "
                            "ON UPDATE CURRENT_TIMESTAMP"
                        )
                    ),
                ],
                {"nullable": False},
                "TIMESTAMP NOT NULL DEFAULT '1999-09-09 09:09:09' "
                "ON UPDATE CURRENT_TIMESTAMP",
            ),
            (
                [
                    mysql.MSTimeStamp(),
                    DefaultClause(
                        sql.text(
                            "CURRENT_TIMESTAMP " "ON UPDATE CURRENT_TIMESTAMP"
                        )
                    ),
                ],
                {},
                "TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP "
                "ON UPDATE CURRENT_TIMESTAMP",
            ),
            (
                [
                    mysql.MSTimeStamp,
                    DefaultClause(
                        sql.text(
                            "CURRENT_TIMESTAMP " "ON UPDATE CURRENT_TIMESTAMP"
                        )
                    ),
                ],
                {"nullable": False},
                "TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP "
                "ON UPDATE CURRENT_TIMESTAMP",
            ),
        ]
        for spec, kw, expected in columns:
            c = Column("t", *spec, **kw)
            # attach the column to a Table so DDL compilation sees it in
            # a proper table context
            Table("t", MetaData(), c)
            self.assert_compile(schema.CreateColumn(c), "t %s" % expected)
Example #25
0
 def test_empty_insert_pk4(self):
     """Empty INSERT against an integer PK plus a server-defaulted column."""
     table = Table(
         'd',
         MetaData(testing.db),
         Column('x', Integer, primary_key=True),
         Column('y', Integer, DefaultClause('123')),
     )
     self._test_empty_insert(table)
Example #26
0
    def test_autoincrement(self):
        """Round-trip AUTO_INCREMENT reflection for assorted PK layouts.

        Naming convention: columns named ``int_y*`` are expected to
        reflect with ``autoincrement=True``; columns named ``int_n*``
        (carrying an explicit server default and/or
        ``autoincrement=False``) must not.
        """
        meta = MetaData(testing.db)
        try:
            # ai_1: autoincrement int first, non-autoincrement int second
            Table('ai_1',
                  meta,
                  Column('int_y',
                         Integer,
                         primary_key=True,
                         autoincrement=True),
                  Column('int_n',
                         Integer,
                         DefaultClause('0'),
                         primary_key=True),
                  mysql_engine='MyISAM')
            # ai_2: same layout as ai_1
            Table('ai_2',
                  meta,
                  Column('int_y',
                         Integer,
                         primary_key=True,
                         autoincrement=True),
                  Column('int_n',
                         Integer,
                         DefaultClause('0'),
                         primary_key=True),
                  mysql_engine='MyISAM')
            # ai_3: non-autoincrement column listed before the
            # autoincrement one
            Table('ai_3',
                  meta,
                  Column('int_n',
                         Integer,
                         DefaultClause('0'),
                         primary_key=True,
                         autoincrement=False),
                  Column('int_y',
                         Integer,
                         primary_key=True,
                         autoincrement=True),
                  mysql_engine='MyISAM')
            # ai_4: composite integer PK with no autoincrement at all
            Table('ai_4',
                  meta,
                  Column('int_n',
                         Integer,
                         DefaultClause('0'),
                         primary_key=True,
                         autoincrement=False),
                  Column('int_n2',
                         Integer,
                         DefaultClause('0'),
                         primary_key=True,
                         autoincrement=False),
                  mysql_engine='MyISAM')
            # ai_5: autoincrement first, explicit autoincrement=False second
            Table('ai_5',
                  meta,
                  Column('int_y',
                         Integer,
                         primary_key=True,
                         autoincrement=True),
                  Column('int_n',
                         Integer,
                         DefaultClause('0'),
                         primary_key=True,
                         autoincrement=False),
                  mysql_engine='MyISAM')
            # ai_6: string PK member ahead of the autoincrement int
            Table('ai_6',
                  meta,
                  Column('o1', String(1), DefaultClause('x'),
                         primary_key=True),
                  Column('int_y',
                         Integer,
                         primary_key=True,
                         autoincrement=True),
                  mysql_engine='MyISAM')
            # ai_7: two string PK members plus an autoincrement int
            Table('ai_7',
                  meta,
                  Column('o1', String(1), DefaultClause('x'),
                         primary_key=True),
                  Column('o2', String(1), DefaultClause('x'),
                         primary_key=True),
                  Column('int_y',
                         Integer,
                         primary_key=True,
                         autoincrement=True),
                  mysql_engine='MyISAM')
            # ai_8: string-only composite PK, no integer column
            Table('ai_8',
                  meta,
                  Column('o1', String(1), DefaultClause('x'),
                         primary_key=True),
                  Column('o2', String(1), DefaultClause('x'),
                         primary_key=True),
                  mysql_engine='MyISAM')
            meta.create_all()

            table_names = [
                'ai_1', 'ai_2', 'ai_3', 'ai_4', 'ai_5', 'ai_6', 'ai_7', 'ai_8'
            ]
            # reflect everything back through a fresh MetaData
            mr = MetaData(testing.db)
            mr.reflect(only=table_names)

            for tbl in [mr.tables[name] for name in table_names]:
                for c in tbl.c:
                    if c.name.startswith('int_y'):
                        assert c.autoincrement
                    elif c.name.startswith('int_n'):
                        assert not c.autoincrement
                # insert a row with no values; only the autoincrement
                # column (if any) should come back as 1
                tbl.insert().execute()
                if 'int_y' in tbl.c:
                    assert select([tbl.c.int_y]).scalar() == 1
                    # exactly one column in the row holds the value 1
                    assert list(tbl.select().execute().first()).count(1) == 1
                else:
                    assert 1 not in list(tbl.select().execute().first())
        finally:
            # drop the test tables even when an assertion fails
            meta.drop_all()
Example #27
0
class Authority(db.Model):
    """A certificate authority from which certificates can be issued.

    The ``options`` column holds a JSON-encoded list of
    ``{"name": ..., "value": ...}`` dicts; single-option lookups are
    centralized in :meth:`_get_option_value`.
    """

    __tablename__ = "authorities"

    id = Column(Integer, primary_key=True)
    owner = Column(String(128), nullable=False)
    name = Column(String(128), unique=True)
    body = Column(Text())
    chain = Column(Text())
    active = Column(Boolean, default=True)
    plugin_name = Column(String(64))
    description = Column(Text)
    options = Column(JSON)
    date_created = Column(DateTime, DefaultClause(func.now()), nullable=False)

    roles = relationship(
        "Role",
        secondary=roles_authorities,
        passive_deletes=True,
        backref=db.backref("authority"),
        lazy="dynamic",
    )
    user_id = Column(Integer, ForeignKey("users.id"))

    # the certificate backing this authority itself (its root)
    authority_certificate = relationship(
        "Certificate",
        backref="root_authority",
        uselist=False,
        foreign_keys="Certificate.root_authority_id",
    )
    # certificates issued by this authority
    certificates = relationship(
        "Certificate",
        backref="authority",
        foreign_keys="Certificate.authority_id",
    )

    authority_pending_certificate = relationship(
        "PendingCertificate",
        backref="root_authority",
        uselist=False,
        foreign_keys="PendingCertificate.root_authority_id",
    )
    pending_certificates = relationship(
        "PendingCertificate",
        backref="authority",
        foreign_keys="PendingCertificate.authority_id",
    )

    def __init__(self, **kwargs):
        """Create an authority.

        Required keys: ``owner``, ``authority_certificate`` and
        ``plugin`` (a dict carrying a ``"slug"`` key); ``roles``,
        ``name``, ``description`` and ``options`` are optional.
        """
        self.owner = kwargs["owner"]
        self.roles = kwargs.get("roles", [])
        self.name = kwargs.get("name")
        self.description = kwargs.get("description")
        self.authority_certificate = kwargs["authority_certificate"]
        self.plugin_name = kwargs["plugin"]["slug"]
        self.options = kwargs.get("options")

    @property
    def plugin(self):
        """Return the issuer plugin instance registered under ``plugin_name``."""
        return plugins.get(self.plugin_name)

    def _get_option_value(self, option_name, default=None):
        """Look up a single value in the JSON-encoded ``options`` list.

        Returns *default* when no options are stored, the decoded blob is
        not a list, or no entry named *option_name* is present.
        """
        if not self.options:
            return default

        options_array = json.loads(self.options)
        if isinstance(options_array, list):
            for option in options_array:
                if "name" in option and option["name"] == option_name:
                    return option["value"]

        return default

    @property
    def is_cab_compliant(self):
        """
        Whether the authority is CAB Forum Compliant, i.e., adhering to
        the CA/Browser Forum Baseline Requirements.
        Returns None if the option is not available.
        """
        return self._get_option_value("cab_compliant")

    @property
    def is_cn_optional(self):
        """
        Whether the common name is treated as an optional field.
        Returns False if the option is not available.
        """
        return self._get_option_value("cn_optional", default=False)

    @property
    def max_issuance_days(self):
        """Maximum validity (days) for CAB-compliant authorities.

        Implicitly returns None for non-compliant authorities (no cap).
        """
        if self.is_cab_compliant:
            return current_app.config.get("PUBLIC_CA_MAX_VALIDITY_DAYS", 397)

    @property
    def default_validity_days(self):
        """Default certificate validity (days) issued by this authority."""
        if self.is_cab_compliant:
            return current_app.config.get("PUBLIC_CA_MAX_VALIDITY_DAYS", 397)

        return current_app.config.get("DEFAULT_VALIDITY_DAYS",
                                      365)  # 1 year default

    def __repr__(self):
        return "Authority(name={name})".format(name=self.name)
Example #28
0
class PendingCertificate(db.Model):
    """A certificate request that has been submitted but not yet issued."""

    __tablename__ = "pending_certs"
    id = Column(Integer, primary_key=True)
    # identifier assigned by the external CA/plugin, if any
    external_id = Column(String(128))
    owner = Column(String(128), nullable=False)
    name = Column(String(256), unique=True)
    description = Column(String(1024))
    notify = Column(Boolean, default=True)
    # number of issuance attempts performed so far (starts at 0 in __init__)
    number_attempts = Column(Integer)
    # True when the auto-generated name should be replaced on resolution
    rename = Column(Boolean, default=True)
    resolved = Column(Boolean, default=False)
    # id of the Certificate this request resolved into, once issued
    resolved_cert_id = Column(Integer, nullable=True)

    cn = Column(String(128))
    csr = Column(Text(), nullable=False)
    chain = Column(Text())
    # NOTE(review): Vault column type presumably encrypts at rest — confirm
    private_key = Column(Vault, nullable=True)

    date_created = Column(ArrowType, DefaultClause(func.now()), nullable=False)
    dns_provider_id = Column(
        Integer, ForeignKey("dns_providers.id", ondelete="CASCADE")
    )

    status = Column(Text(), nullable=True)
    # refreshed on every UPDATE via onupdate=func.now()
    last_updated = Column(
        ArrowType, DefaultClause(func.now()), onupdate=func.now(), nullable=False
    )

    rotation = Column(Boolean, default=False)
    user_id = Column(Integer, ForeignKey("users.id"))
    authority_id = Column(Integer, ForeignKey("authorities.id", ondelete="CASCADE"))
    root_authority_id = Column(
        Integer, ForeignKey("authorities.id", ondelete="CASCADE")
    )
    rotation_policy_id = Column(Integer, ForeignKey("rotation_policies.id"))

    # many-to-many associations through the pending_cert_* link tables
    notifications = relationship(
        "Notification",
        secondary=pending_cert_notification_associations,
        backref="pending_cert",
        passive_deletes=True,
    )
    destinations = relationship(
        "Destination",
        secondary=pending_cert_destination_associations,
        backref="pending_cert",
        passive_deletes=True,
    )
    sources = relationship(
        "Source",
        secondary=pending_cert_source_associations,
        backref="pending_cert",
        passive_deletes=True,
    )
    roles = relationship(
        "Role",
        secondary=pending_cert_role_associations,
        backref="pending_cert",
        passive_deletes=True,
    )
    replaces = relationship(
        "Certificate",
        secondary=pending_cert_replacement_associations,
        backref="pending_cert",
        passive_deletes=True,
    )
    options = Column(JSONType)

    rotation_policy = relationship("RotationPolicy")

    # attribute names to be masked on serialization — presumably consumed
    # by the marshalling layer; verify against callers
    sensitive_fields = ("private_key",)

    def __init__(self, **kwargs):
        """Build a pending certificate from request keyword arguments.

        ``owner`` is required (KeyError otherwise); when ``name`` is
        absent, ``common_name`` and ``authority`` are required to derive
        an auto-generated name.  ``csr`` is read with .get() but is
        parsed below, so it is effectively required — TODO confirm.
        """
        self.csr = kwargs.get("csr")
        self.private_key = kwargs.get("private_key", "")
        if self.private_key:
            # If the request does not send private key, the key exists but the value is None
            self.private_key = self.private_key.strip()
        self.external_id = kwargs.get("external_id")

        # when destinations are appended they require a valid name.
        if kwargs.get("name"):
            self.name = get_or_increase_name(defaults.text_to_slug(kwargs["name"]), 0)
            self.rename = False
        else:
            # TODO: Fix auto-generated name, it should be renamed on creation
            self.name = get_or_increase_name(
                defaults.certificate_name(
                    kwargs["common_name"],
                    kwargs["authority"].name,
                    dt.now(),
                    dt.now(),
                    False,
                ),
                self.external_id,
            )
            self.rename = True

        # common name is derived from the CSR, not taken from kwargs
        self.cn = defaults.common_name(utils.parse_csr(self.csr))
        self.owner = kwargs["owner"]
        self.number_attempts = 0

        if kwargs.get("chain"):
            self.chain = kwargs["chain"].strip()

        self.notify = kwargs.get("notify", True)
        self.destinations = kwargs.get("destinations", [])
        self.notifications = kwargs.get("notifications", [])
        self.description = kwargs.get("description")
        # de-duplicate roles while accepting any iterable
        self.roles = list(set(kwargs.get("roles", [])))
        self.replaces = kwargs.get("replaces", [])
        self.rotation = kwargs.get("rotation")
        self.rotation_policy = kwargs.get("rotation_policy")
        # dns_provider is optional; swallow lookup failures entirely.
        # NOTE(review): listing Exception makes the other entries in this
        # tuple redundant — it already catches everything non-exiting.
        try:
            self.dns_provider_id = kwargs.get("dns_provider").id
        except (AttributeError, KeyError, TypeError, Exception):
            pass