Example no. 1
class Prefix(Base):
    __tablename__ = 'prefix'
    id = schema.Column('id',
                       types.Integer,
                       schema.Sequence('prefix_id_seq'),
                       primary_key=True)
    prefix = schema.Column('prefix', types.Text())
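A minimal sketch of how a column carrying a schema.Sequence can be exercised end to end, assuming a throwaway SQLite engine (none of the engine or metadata setup appears in the example above; the model is restated with Core Table syntax so the snippet is self-contained). On backends without native sequences SQLAlchemy has historically skipped the CREATE SEQUENCE and fallen back to the backend's own autoincrement:

from sqlalchemy import create_engine, schema, types

metadata = schema.MetaData()

prefix_table = schema.Table(
    'prefix', metadata,
    schema.Column('id', types.Integer,
                  schema.Sequence('prefix_id_seq'),
                  primary_key=True),
    schema.Column('prefix', types.Text()),
)

engine = create_engine('sqlite://')   # placeholder URL; any supported backend works
metadata.create_all(engine)           # CREATE SEQUENCE is emitted only where the dialect supports it
with engine.begin() as conn:
    conn.execute(prefix_table.insert().values(prefix='+39'))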
Example no. 2
    def get_column_specification(self, column, **kwargs):
        colspec = self.preparer.format_column(
            column) + " " + column.type.dialect_impl(
                self.dialect).get_col_spec()

        # install a sequence if we have an implicit IDENTITY column
        if (not getattr(column.table, 'has_sequence', False)) and column.primary_key and \
                column.autoincrement and isinstance(column.type, types.Integer) and not column.foreign_keys:
            if column.default is None or (isinstance(column.default,
                                                     schema.Sequence)
                                          and column.default.optional):
                column.sequence = schema.Sequence(column.name + '_seq')

        if not column.nullable:
            colspec += " NOT NULL"

        if hasattr(column, 'sequence'):
            column.table.has_sequence = column
            colspec = self.preparer.format_column(column) + " counter"
        else:
            default = self.get_column_default_string(column)
            if default is not None:
                colspec += " DEFAULT " + default

        return colspec
Example no. 3
class InvoiceEntry(meta.BaseObject):
    __tablename__ = "invoice_entry"

    id = schema.Column(types.Integer(),
                       schema.Sequence("invoice_entry_id_seq", optional=True),
                       primary_key=True,
                       autoincrement=True)
    position = schema.Column(types.Integer(), default=0)
    invoice_id = schema.Column(types.Integer(),
                               schema.ForeignKey(Invoice.id,
                                                 onupdate="CASCADE",
                                                 ondelete="CASCADE"),
                               nullable=False)
    invoice = orm.relationship(Invoice,
                               backref=orm.backref("entries",
                                                   order_by=position))
    description = schema.Column(types.UnicodeText(), nullable=False)
    vat = schema.Column(types.Integer(), nullable=False)
    currency_id = schema.Column(types.Integer(3),
                                schema.ForeignKey(Currency.id,
                                                  onupdate="RESTRICT",
                                                  ondelete="RESTRICT"),
                                nullable=False)
    currency = orm.relationship(Currency, lazy="joined")
    unit_price = schema.Column(types.Numeric(precision=7, scale=2),
                               nullable=False)
    units = schema.Column(types.Numeric(4, 2), nullable=False, default=1)

    @property
    def total(self):
        return self.unit_price * self.units * self.currency.rate
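To make the arithmetic behind the total property concrete, a standalone sketch with invented figures (Numeric columns surface as decimal.Decimal at the ORM level, so Decimal is used here as well):

from decimal import Decimal

# invented values mirroring InvoiceEntry.total = unit_price * units * currency.rate
unit_price = Decimal("19.99")   # Numeric(precision=7, scale=2)
units = Decimal("2.50")         # Numeric(4, 2), default 1
rate = Decimal("1.00")          # Currency.rate, Numeric(6, 2); 1.00 for the base currency

total = unit_price * units * rate
print(total)                    # 49.975000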
Example no. 4
    def get_column_specification(self, column, **kwargs):

        colspec = self.preparer.format_column(column)

        if (not getattr(column.table, 'has_sequence', False)) and column.primary_key and \
                column.autoincrement and isinstance(column.type, sqltypes.Integer):
            if column.default is None or (isinstance(column.default,
                                                     schema.Sequence)
                                          and column.default.optional):
                column.sequence = schema.Sequence(column.name + '_seq')

        if hasattr(column, 'sequence'):
            column.table.has_sequence = column
            #colspec += " numeric(30,0) IDENTITY"
            colspec += " Integer IDENTITY"
        else:
            colspec += " " + column.type.dialect_impl(
                self.dialect, _for_ddl=column).get_col_spec()

        if not column.nullable:
            colspec += " NOT NULL"

        default = self.get_column_default_string(column)
        if default is not None:
            colspec += " DEFAULT " + default

        return colspec
Example no. 5
    def get_column_specification(self, column, **kwargs):
        colspec = self.preparer.format_column(
            column) + " " + column.type.engine_impl(
                self.engine).get_col_spec()

        # install a IDENTITY Sequence if we have an implicit IDENTITY column
        if column.primary_key and column.autoincrement and isinstance(
                column.type, sqltypes.Integer) and not column.foreign_key:
            if column.default is None or (isinstance(column.default,
                                                     schema.Sequence)
                                          and column.default.optional):
                column.sequence = schema.Sequence(column.name + '_seq')

        if not column.nullable:
            colspec += " NOT NULL"

        if hasattr(column, 'sequence'):
            colspec += " IDENTITY(%s,%s)" % (column.sequence.start or 1,
                                             column.sequence.increment or 1)
        else:
            default = self.get_column_default_string(column)
            if default is not None:
                colspec += " DEFAULT " + default

        return colspec
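For orientation, the string this method would assemble for an implicit identity primary key looks roughly like the fragment below (an illustration under assumed inputs, not output captured from the dialect):

# assumed input: Column('id', Integer, primary_key=True, autoincrement=True), no explicit default
# format_column(column)           -> "id"
# get_col_spec()                  -> "INTEGER"
# not column.nullable (PK)        -> append " NOT NULL"
# implicit Sequence('id_seq')     -> append " IDENTITY(1,1)"
colspec = "id INTEGER NOT NULL IDENTITY(1,1)"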
Example no. 6
class Users(object):
    __tablename__ = 'users'
    id = schema.Column('id',
                       types.Integer,
                       schema.Sequence('users_id_seq'),
                       primary_key=True)
    name = schema.Column('name', types.Text())
    password = schema.Column('password', types.Text())
    real_name = schema.Column('real_name', types.Text())
Example no. 7
def Table(*args, **kw):
    """A schema.Table wrapper/hook for dialect-specific tweaks."""

    global schema
    if schema is None:
        from sqlalchemy import schema

    test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
                      if k.startswith('test_')])

    kw.update(table_options)

    if testing.against('mysql'):
        if 'mysql_engine' not in kw and 'mysql_type' not in kw:
            if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
                kw['mysql_engine'] = 'InnoDB'

    # Apply some default cascading rules for self-referential foreign keys.
    # MySQL InnoDB has some issues around selecting self-refs too.
    if testing.against('firebird'):
        table_name = args[0]
        unpack = (testing.config.db.dialect.
                  identifier_preparer.unformat_identifiers)

        # Only going after ForeignKeys in Columns.  May need to
        # expand to ForeignKeyConstraint too.
        fks = [fk
               for col in args if isinstance(col, schema.Column)
               for fk in col.args if isinstance(fk, schema.ForeignKey)]

        for fk in fks:
            # root around in raw spec
            ref = fk._colspec
            if isinstance(ref, schema.Column):
                name = ref.table.name
            else:
                # take just the table name: on FB there cannot be
                # a schema, so the first element is always the
                # table name, possibly followed by the field name
                name = unpack(ref)[0]
            if name == table_name:
                if fk.ondelete is None:
                    fk.ondelete = 'CASCADE'
                if fk.onupdate is None:
                    fk.onupdate = 'CASCADE'

    if testing.against('firebird', 'oracle'):
        pk_seqs = [col for col in args
                   if (isinstance(col, schema.Column)
                       and col.primary_key
                       and getattr(col, '_needs_autoincrement', False))]
        for c in pk_seqs:
            c.args.append(schema.Sequence(args[0] + '_' + c.name + '_seq', optional=True))
    return schema.Table(*args, **kw)
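A sketch of a call site for this wrapper, as it might appear in a dialect-agnostic test suite (metadata and the imported names are placeholders; test_needs_fk is one of the flags the wrapper pops and translates into mysql_engine='InnoDB' on MySQL, while on Firebird the self-referential ForeignKey gets CASCADE rules):

from sqlalchemy import Column, ForeignKey, Integer, MetaData

metadata = MetaData()

# test_needs_fk never reaches schema.Table; the wrapper consumes it
employees = Table(
    'employees', metadata,
    Column('id', Integer, primary_key=True),
    Column('manager_id', Integer, ForeignKey('employees.id')),
    test_needs_fk=True,
)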
Example no. 8
 def setup_project(self):
     self.project_table = schema.Table(
         'project',
         self.metadata,
         schema.Column('id',
                       types.Integer,
                       schema.Sequence('project_id_seq'),
                       primary_key=True),
         schema.Column('title', types.Unicode()),
     )
     orm.mapper(ProjectRecord, self.project_table)
Example no. 9
class Comment(Base):
    __tablename__ = 'comments'
    id = schema.Column(types.Integer,
                       schema.Sequence('comment_seq_id', optional=True),
                       primary_key=True)
    pageid = schema.Column(types.Integer,
                           schema.ForeignKey('pages.id'),
                           nullable=False)
    content = schema.Column(types.Text(), default=u'')
    name = schema.Column(types.Unicode(255))
    email = schema.Column(types.Unicode(255), nullable=False)
    created = schema.Column(types.TIMESTAMP(), default=now)
Example no. 10
class Music(Base):
    __tablename__ = 'music'
    id = schema.Column(types.Integer,
                       schema.Sequence('music_seq_id'),
                       primary_key=True)
    title = schema.Column(types.Text(), default="")
    artist = schema.Column(types.Text(), default="")
    albumart = schema.Column(types.Text(), default="/albumart.jpg")
    name = schema.Column(types.Text())
    path = schema.Column(types.Text())
    summary = schema.Column(types.Text(), default="No info found")
    content = schema.Column(types.Text(), default="No info found")
Example no. 11
class Page(Base):
    __tablename__ = 'pages'
    id = schema.Column(types.Integer,
                       schema.Sequence('page_seq_id', optional=True),
                       primary_key=True)
    content = schema.Column(types.Text(), nullable=False)
    posted = schema.Column(types.DateTime(), default=now)
    title = schema.Column(types.Unicode(255), default=u'Untitled Page')
    heading = schema.Column(types.Unicode(255))
    gender = schema.Column(types.Enum('male', 'female'))

    comments = orm.relationship("Comment", backref='page')

    tags = orm.relationship('Tag', secondary=pagetag_table, backref='pages')
Example no. 12
class InvoiceNote(meta.BaseObject):
    __tablename__ = "invoice_note"

    id = schema.Column(types.Integer(),
                       schema.Sequence("invoice_note_id_seq", optional=True),
                       primary_key=True,
                       autoincrement=True)
    posted = schema.Column(types.DateTime(),
                           nullable=False,
                           default=functions.now())
    invoice_id = schema.Column(types.Integer(),
                               schema.ForeignKey(Invoice.id,
                                                 onupdate="CASCADE",
                                                 ondelete="CASCADE"),
                               nullable=False)
    invoice = orm.relationship(Invoice,
                               backref=orm.backref("notes", order_by=posted))
    comment = schema.Column(types.UnicodeText(), nullable=False)
Example no. 13
class Currency(BaseObject):
    """A currency

    Currencies are identified by their ISO 4217 three letter currency
    code.
    """
    __tablename__ = "currency"

    id = schema.Column(types.Integer(),
                       schema.Sequence("currency_id_seq", optional=True),
                       primary_key=True,
                       autoincrement=True)
    code = schema.Column(types.String(3), nullable=False)
    rate = schema.Column(types.Numeric(precision=6, scale=2), nullable=False)
    until = schema.Column(types.Date())

    def __repr__(self):
        return "<Currency id=%s, code=%s rate=%.2f>" % (self.id, self.code,
                                                        self.rate)
Example no. 14
class Customer(BaseObject):
    """A customer
    """
    __tablename__ = "customer"
    __table_args__ = (schema.UniqueConstraint("account_id", "title"),
                      schema.UniqueConstraint("account_id",
                                              "invoice_code"), {})

    id = schema.Column(types.Integer(),
                       schema.Sequence("customer_id_seq", optional=True),
                       primary_key=True,
                       autoincrement=True)
    account_id = schema.Column(types.Integer(),
                               schema.ForeignKey(Account.id,
                                                 onupdate="CASCADE",
                                                 ondelete="CASCADE"),
                               nullable=False)
    account = orm.relationship(Account, backref="customers")
    title = schema.Column(types.Unicode(128), nullable=False)
    invoice_code = schema.Column(types.String(16), nullable=False)
    ein = schema.Column(types.String(64))
    address = schema.Column(types.UnicodeText)
    postal_code = schema.Column(types.String(16))
    city = schema.Column(types.Unicode(64))
    country = schema.Column(types.String(3))
    contact_name = schema.Column(types.Unicode(64))
    contact_email = schema.Column(types.String(64))
    contact_phone = schema.Column(types.String(32))

    def __repr__(self):
        return "<Currency %s rate=%.2f>" % (self.code, self.rate)

    @orm.reconstructor
    def _add_acls(self):
        self.__acl__ = [(security.Allow, self.account_id,
                         ["view", "edit", "add-invoice"])]
Example no. 15
    def reflecttable(self, connection, table, include_columns):
        # Get base columns
        if table.schema is not None:
            current_schema = table.schema
        else:
            current_schema = self.get_default_schema_name(connection)

        s = sql.select([columns, domains],
                       tables.c.table_name == table.name,
                       from_obj=[columns.join(tables).join(domains)],
                       order_by=[columns.c.column_id])

        c = connection.execute(s)
        found_table = False
        # makes sure we append the columns in the correct order
        while True:
            row = c.fetchone()
            if row is None:
                break
            found_table = True
            (name, type, nullable, charlen, numericprec, numericscale, default,
             primary_key, max_identity, table_id, column_id) = (
                 row[columns.c.column_name],
                 row[domains.c.domain_name],
                 row[columns.c.nulls] == 'Y',
                 row[columns.c.width],
                 row[domains.c.precision],
                 row[columns.c.scale],
                 row[columns.c.default],
                 row[columns.c.pkey] == 'Y',
                 row[columns.c.max_identity],
                 row[tables.c.table_id],
                 row[columns.c.column_id],
             )
            if include_columns and name not in include_columns:
                continue

            # FIXME: else problems with SybaseBinary(size)
            if numericscale == 0:
                numericscale = None

            args = []
            for a in (charlen, numericprec, numericscale):
                if a is not None:
                    args.append(a)
            coltype = self.ischema_names.get(type, None)
            if coltype == SybaseString and charlen == -1:
                coltype = SybaseText()
            else:
                if coltype is None:
                    util.warn("Did not recognize type '%s' of column '%s'" %
                              (type, name))
                    coltype = sqltypes.NULLTYPE
                coltype = coltype(*args)
            colargs = []
            if default is not None:
                colargs.append(schema.PassiveDefault(sql.text(default)))

            # any sequences ?
            col = schema.Column(name,
                                coltype,
                                nullable=nullable,
                                primary_key=primary_key,
                                *colargs)
            if int(max_identity) > 0:
                col.sequence = schema.Sequence(name + '_identity')
                col.sequence.start = int(max_identity)
                col.sequence.increment = 1

            # append the column
            table.append_column(col)

        # any foreign key constraint for this table ?
        # note: no multi-column foreign keys are considered
        s = "select st1.table_name, sc1.column_name, st2.table_name, sc2.column_name from systable as st1 join sysfkcol on st1.table_id=sysfkcol.foreign_table_id join sysforeignkey join systable as st2 on sysforeignkey.primary_table_id = st2.table_id join syscolumn as sc1 on sysfkcol.foreign_column_id=sc1.column_id and sc1.table_id=st1.table_id join syscolumn as sc2 on sysfkcol.primary_column_id=sc2.column_id and sc2.table_id=st2.table_id where st1.table_name='%(table_name)s';" % {
            'table_name': table.name
        }
        c = connection.execute(s)
        foreignKeys = {}
        while True:
            row = c.fetchone()
            if row is None:
                break
            (foreign_table, foreign_column, primary_table, primary_column) = (
                row[0],
                row[1],
                row[2],
                row[3],
            )
            if not primary_table in foreignKeys.keys():
                foreignKeys[primary_table] = [[
                    '%s' % (foreign_column)
                ], ['%s.%s' % (primary_table, primary_column)]]
            else:
                foreignKeys[primary_table][0].append('%s' % (foreign_column))
                foreignKeys[primary_table][1].append(
                    '%s.%s' % (primary_table, primary_column))
        for primary_table in foreignKeys.keys():
            #table.append_constraint(schema.ForeignKeyConstraint(['%s.%s'%(foreign_table, foreign_column)], ['%s.%s'%(primary_table,primary_column)]))
            table.append_constraint(
                schema.ForeignKeyConstraint(foreignKeys[primary_table][0],
                                            foreignKeys[primary_table][1]))

        if not found_table:
            raise exceptions.NoSuchTableError(table.name)
Example no. 16
def now():
    u_now = u"%s" % datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
    return u_now


######################## MODEL ################################################
table_prefix = config.get("linotpAudit.sql.table_prefix", "")

audit_table_name = '%saudit' % table_prefix

audit_table = schema.Table(
    audit_table_name, metadata,
    schema.Column('id',
                  types.Integer,
                  schema.Sequence('audit_seq_id', optional=True),
                  primary_key=True),
    schema.Column('timestamp', types.Unicode(30), default=now, index=True),
    schema.Column('signature', types.Unicode(512), default=u''),
    schema.Column('action', types.Unicode(30), index=True),
    schema.Column('success', types.Unicode(30), default=u"False"),
    schema.Column('serial', types.Unicode(30), index=True),
    schema.Column('tokentype', types.Unicode(40)),
    schema.Column('user', types.Unicode(255), index=True),
    schema.Column('realm', types.Unicode(255), index=True),
    schema.Column('administrator', types.Unicode(255)),
    schema.Column('action_detail', types.Unicode(512), default=u''),
    schema.Column('info', types.Unicode(512), default=u''),
    schema.Column('linotp_server', types.Unicode(80)),
    schema.Column('client', types.Unicode(80)),
    schema.Column('log_level', types.Unicode(20), default=u"INFO", index=True),
Example no. 17
import datetime
from sqlalchemy import schema, types

metadata = schema.MetaData()


def now():
    return datetime.datetime.now()


page_table = schema.Table(
    'page',
    metadata,
    schema.Column('id',
                  types.Integer,
                  schema.Sequence('page_seq_id', optional=True),
                  primary_key=True),
    schema.Column('content', types.Text(), nullable=False),
    schema.Column('posted', types.DateTime(), default=now),
    schema.Column('title', types.Unicode(255), default=u'Untitled Page'),
    schema.Column('heading', types.Unicode(255)),
)
comment_table = schema.Table(
    'comment',
    metadata,
    schema.Column('id',
                  types.Integer,
                  schema.Sequence('comment_seq_id', optional=True),
                  primary_key=True),
    schema.Column('pageid',
                  types.Integer,
Example no. 18
    def reflecttable(self, connection, table):
        import sqlalchemy.databases.information_schema as ischema

        # Get base columns
        if table.schema is not None:
            current_schema = table.schema
        else:
            current_schema = self.get_default_schema_name()

        columns = self.uppercase_table(ischema.columns)
        s = sql.select([columns],
                       current_schema
                       and sql.and_(columns.c.table_name == table.name,
                                    columns.c.table_schema == current_schema)
                       or columns.c.table_name == table.name,
                       order_by=[columns.c.ordinal_position])

        c = connection.execute(s)
        found_table = False
        while True:
            row = c.fetchone()
            if row is None:
                break
            found_table = True
            (name, type, nullable, charlen, numericprec, numericscale,
             default) = (row[columns.c.column_name], row[columns.c.data_type],
                         row[columns.c.is_nullable] == 'YES',
                         row[columns.c.character_maximum_length],
                         row[columns.c.numeric_precision],
                         row[columns.c.numeric_scale],
                         row[columns.c.column_default])

            args = []
            for a in (charlen, numericprec, numericscale):
                if a is not None:
                    args.append(a)
            coltype = ischema_names[type]
            coltype = coltype(*args)
            colargs = []
            if default is not None:
                colargs.append(schema.PassiveDefault(sql.text(default)))

            table.append_column(
                schema.Column(name, coltype, nullable=nullable, *colargs))

        if not found_table:
            raise exceptions.NoSuchTableError(table.name)

        # We also run an sp_columns to check for identity columns:
        # FIXME: note that this only fetches the existence of an identity column, not it's properties like (seed, increment)
        #        also, add a check to make sure we specify the schema name of the table
        # cursor = table.engine.execute("sp_columns " + table.name, {})
        cursor = connection.execute("sp_columns " + table.name)
        while True:
            row = cursor.fetchone()
            if row is None:
                break
            col_name, type_name = row[3], row[5]
            if type_name.endswith("identity"):
                ic = table.c[col_name]
                # set up a pseudo-sequence to represent the identity attribute - we interpret this at table.create() time as the identity attribute
                ic.sequence = schema.Sequence(ic.name + '_identity')

        # Add constraints
        RR = self.uppercase_table(
            ischema.ref_constraints
        )  #information_schema.referential_constraints
        TC = self.uppercase_table(
            ischema.constraints)  #information_schema.table_constraints
        C = self.uppercase_table(ischema.column_constraints).alias(
            'C'
        )  #information_schema.constraint_column_usage: the constrained column
        R = self.uppercase_table(ischema.column_constraints).alias(
            'R'
        )  #information_schema.constraint_column_usage: the referenced column

        # Primary key constraints
        s = sql.select([C.c.column_name, TC.c.constraint_type],
                       sql.and_(TC.c.constraint_name == C.c.constraint_name,
                                C.c.table_name == table.name))
        c = connection.execute(s)
        for row in c:
            if 'PRIMARY' in row[TC.c.constraint_type.name]:
                table.primary_key.add(table.c[row[0]])

        # Foreign key constraints
        s = sql.select([
            C.c.column_name, R.c.table_schema, R.c.table_name, R.c.column_name,
            RR.c.constraint_name, RR.c.match_option, RR.c.update_rule,
            RR.c.delete_rule
        ],
                       sql.and_(
                           C.c.table_name == table.name,
                           C.c.constraint_name == RR.c.constraint_name,
                           R.c.constraint_name == RR.c.unique_constraint_name),
                       order_by=[RR.c.constraint_name])
        rows = connection.execute(s).fetchall()

        # group rows by constraint ID, to handle multi-column FKs
        fknm, scols, rcols = (None, [], [])
        for r in rows:
            scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r
            if rfknm != fknm:
                if fknm:
                    table.append_constraint(
                        schema.ForeignKeyConstraint(
                            scols, ['%s.%s' % (t, c) for (s, t, c) in rcols],
                            fknm))
                fknm, scols, rcols = (rfknm, [], [])
            if (not scol in scols): scols.append(scol)
            if (not (rschema, rtbl, rcol) in rcols):
                rcols.append((rschema, rtbl, rcol))

        if fknm and scols:
            table.append_constraint(
                schema.ForeignKeyConstraint(
                    scols, ['%s.%s' % (t, c) for (s, t, c) in rcols], fknm))
Example no. 19
    schema.Column('url', types.Text(), default=u''),
    schema.Column('site', types.Text(), default=u''),
    schema.Column('handle', types.Text(), default=u''))

openid_handles_table = schema.Table(
    'openid_handles', metadata,
    schema.Column('handler', types.Unicode(255), primary_key=True),
    schema.Column('secret', types.Text(), default=u''),
    schema.Column('assoc_type', types.Text(), default=u''),
    schema.Column('private', types.Boolean(), default=False))

openid_sites_table = schema.Table(
    'openid_sites', metadata,
    schema.Column('id',
                  types.Integer,
                  schema.Sequence('openid_sites_seq_id', optional=True),
                  primary_key=True), schema.Column('handle',
                                                   types.Unicode(255)),
    schema.Column('site', types.Text(), default=u''))

openid_user_table = schema.Table(
    'openid_user', metadata,
    schema.Column('user', types.Unicode(255), primary_key=True),
    schema.Column('token', types.Text(), default=u''),
    schema.Column('expire', types.Integer, default=0, index=True))

openid_trusted_table = schema.Table(
    'openid_trustedroot', metadata,
    schema.Column('id',
                  types.Integer,
                  schema.Sequence('openid_sites_seq_id', optional=True),
Example no. 20
    def reflecttable(self, table, include_columns, exclude_columns=()):
        """Given a Table object, load its internal constructs based on
        introspection.

        This is the underlying method used by most dialects to produce
        table reflection.  Direct usage is like::

            from sqlalchemy import create_engine, MetaData, Table
            from sqlalchemy.engine import reflection

            engine = create_engine('...')
            meta = MetaData()
            user_table = Table('user', meta)
            insp = Inspector.from_engine(engine)
            insp.reflecttable(user_table, None)

        :param table: a :class:`~sqlalchemy.schema.Table` instance.
        :param include_columns: a list of string column names to include
          in the reflection process.  If ``None``, all columns are reflected.

        """
        dialect = self.bind.dialect

        # table attributes we might need.
        reflection_options = {}

        schema = table.schema
        table_name = table.name

        # apply table options
        tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
        if tbl_opts:
            table.kwargs.update(tbl_opts)

        # table.kwargs will need to be passed to each reflection method.  Make
        # sure keywords are strings.
        tblkw = table.kwargs.copy()
        for (k, v) in list(tblkw.items()):
            del tblkw[k]
            tblkw[str(k)] = v

        if isinstance(schema, str):
            schema = schema.decode(dialect.encoding)
        if isinstance(table_name, str):
            table_name = table_name.decode(dialect.encoding)

        # columns
        found_table = False
        cols_by_orig_name = {}

        for col_d in self.get_columns(table_name, schema, **tblkw):
            found_table = True
            orig_name = col_d['name']

            name = col_d['name']
            if include_columns and name not in include_columns:
                continue
            if exclude_columns and name in exclude_columns:
                continue

            coltype = col_d['type']
            col_kw = {
                'nullable': col_d['nullable'],
            }
            for k in ('autoincrement', 'quote', 'info', 'key'):
                if k in col_d:
                    col_kw[k] = col_d[k]

            colargs = []
            if col_d.get('default') is not None:
                # the "default" value is assumed to be a literal SQL
                # expression, so is wrapped in text() so that no quoting
                # occurs on re-issuance.
                colargs.append(
                    sa_schema.DefaultClause(sql.text(col_d['default']),
                                            _reflected=True))

            if 'sequence' in col_d:
                # TODO: mssql and sybase are using this.
                seq = col_d['sequence']
                sequence = sa_schema.Sequence(seq['name'], 1, 1)
                if 'start' in seq:
                    sequence.start = seq['start']
                if 'increment' in seq:
                    sequence.increment = seq['increment']
                colargs.append(sequence)

            cols_by_orig_name[orig_name] = col = \
                        sa_schema.Column(name, coltype, *colargs, **col_kw)

            table.append_column(col)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        # Primary keys
        pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
        if pk_cons:
            pk_cols = [
                cols_by_orig_name[pk] for pk in pk_cons['constrained_columns']
                if pk in cols_by_orig_name and pk not in exclude_columns
            ]
            pk_cols += [
                pk for pk in table.primary_key if pk.key in exclude_columns
            ]
            primary_key_constraint = sa_schema.PrimaryKeyConstraint(
                name=pk_cons.get('name'), *pk_cols)

            table.append_constraint(primary_key_constraint)

        # Foreign keys
        fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
        for fkey_d in fkeys:
            conname = fkey_d['name']
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            constrained_columns = [
                cols_by_orig_name[c].key if c in cols_by_orig_name else c
                for c in fkey_d['constrained_columns']
            ]
            if exclude_columns and set(constrained_columns).intersection(
                    exclude_columns):
                continue
            referred_schema = fkey_d['referred_schema']
            referred_table = fkey_d['referred_table']
            referred_columns = fkey_d['referred_columns']
            refspec = []
            if referred_schema is not None:
                sa_schema.Table(referred_table,
                                table.metadata,
                                autoload=True,
                                schema=referred_schema,
                                autoload_with=self.bind,
                                **reflection_options)
                for column in referred_columns:
                    refspec.append(".".join(
                        [referred_schema, referred_table, column]))
            else:
                sa_schema.Table(referred_table,
                                table.metadata,
                                autoload=True,
                                autoload_with=self.bind,
                                **reflection_options)
                for column in referred_columns:
                    refspec.append(".".join([referred_table, column]))
            if 'options' in fkey_d:
                options = fkey_d['options']
            else:
                options = {}
            table.append_constraint(
                sa_schema.ForeignKeyConstraint(constrained_columns,
                                               refspec,
                                               conname,
                                               link_to_name=True,
                                               **options))
        # Indexes
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'unknown type')
            if include_columns and \
                            not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s KEY for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            sa_schema.Index(
                name, *[
                    cols_by_orig_name[c]
                    if c in cols_by_orig_name else table.c[c] for c in columns
                ], **dict(unique=unique))
Example no. 21
ini_file = config.get("__file__")
logging.config.fileConfig(ini_file, disable_existing_loggers=False)
log = logging.getLogger(__name__)

metadata = schema.MetaData()

def now():
    u_now = u"%s" % datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
    return u_now

######################## MODEL ################################################
table_prefix = config.get("linotpAudit.sql.table_prefix", "")
audit_table_name = '%saudit' % table_prefix

audit_table = schema.Table(audit_table_name, metadata,
    schema.Column('id', types.Integer, schema.Sequence('audit_seq_id',
                                                       optional=True),
                  primary_key=True),
    schema.Column('timestamp', types.Unicode(30), default=now, index=True),
    schema.Column('signature', types.Unicode(512), default=u''),
    schema.Column('action', types.Unicode(30), index=True),
    schema.Column('success', types.Unicode(30), default=u"False"),
    schema.Column('serial', types.Unicode(30), index=True),
    schema.Column('tokentype', types.Unicode(40)),
    schema.Column('user', types.Unicode(255), index=True),
    schema.Column('realm', types.Unicode(255), index=True),
    schema.Column('administrator', types.Unicode(255)),
    schema.Column('action_detail', types.Unicode(512), default=u''),
    schema.Column('info', types.Unicode(512), default=u''),
    schema.Column('linotp_server', types.Unicode(80)),
    schema.Column('client', types.Unicode(80)),
    schema.Column('log_level', types.Unicode(20), default=u"INFO", index=True),
Example no. 22
def now():
    return datetime.datetime.utcnow()


################
# PRIMARY TABLES
################

# form_table holds the data that constitute OLD Forms
form_table = schema.Table(
    'form',
    meta.metadata,
    schema.Column('id',
                  types.Integer,
                  schema.Sequence('form_seq_id', optional=True),
                  primary_key=True),

    # Textual values
    # transcription -- orthographic, obligatory
    schema.Column('transcription', types.Unicode(255), nullable=False),
    # phonetic transcription -- broad, optional
    schema.Column('phoneticTranscription', types.Unicode(255)),
    # narrow phonetic transcription -- optional
    schema.Column('narrowPhoneticTranscription', types.Unicode(255)),
    schema.Column('morphemeBreak', types.Unicode(255)),
    schema.Column('morphemeGloss', types.Unicode(255)),
    schema.Column('comments', types.UnicodeText()),
    schema.Column('speakerComments', types.UnicodeText()),
    schema.Column('context',
                  types.UnicodeText()),  # describing context of utterance
Example no. 23
class Tag(Base):
    __tablename__ = 'tags'
    id = schema.Column(types.Integer,
                       schema.Sequence('tag_seq_id', optional=True),
                       primary_key=True)
    name = schema.Column(types.Unicode(20), nullable=False, unique=True)
Example no. 24
sm = orm.sessionmaker(autoflush=True, autocommit=False, expire_on_commit=True)
session = orm.scoped_session(sm)
Base = declarative_base()


def now():
    return datetime.datetime.now()


pagetag_table = schema.Table(
    'pagetag',
    Base.metadata,
    schema.Column('id',
                  types.Integer,
                  schema.Sequence('pagetag_seq_id', optional=True),
                  primary_key=True),
    schema.Column('pageid', types.Integer, schema.ForeignKey('pages.id')),
    schema.Column('tagid', types.Integer, schema.ForeignKey('tags.id')),
)


class Page(Base):
    __tablename__ = 'pages'
    id = schema.Column(types.Integer,
                       schema.Sequence('page_seq_id', optional=True),
                       primary_key=True)
    content = schema.Column(types.Text(), nullable=False)
    posted = schema.Column(types.DateTime(), default=now)
    title = schema.Column(types.Unicode(255), default=u'Untitled Page')
    heading = schema.Column(types.Unicode(255))
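A brief usage sketch for the Page/Tag many-to-many wiring above, reusing the session, Base and pagetag_table defined in this example together with the Tag and Comment models and the relationships shown in the earlier Page example (engine binding and metadata creation are assumed to have happened elsewhere):

# hypothetical session usage; assumes Base.metadata has been created on a bound engine
page = Page(content=u'Hello world', heading=u'Intro')
page.tags.append(Tag(name=u'demo'))
page.comments.append(Comment(email=u'author@example.com'))
session.add(page)
session.commit()

tagged = session.query(Page).join(Page.tags).filter(Tag.name == u'demo').all()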
Example no. 25
    def reflecttable(self, connection, table, include_columns):
        # Query to extract the details of all the fields of the given table
        tblqry = """
        SELECT DISTINCT r.rdb$field_name AS fname,
                        r.rdb$null_flag AS null_flag,
                        t.rdb$type_name AS ftype,
                        f.rdb$field_sub_type AS stype,
                        f.rdb$field_length AS flen,
                        f.rdb$field_precision AS fprec,
                        f.rdb$field_scale AS fscale,
                        COALESCE(r.rdb$default_source, f.rdb$default_source) AS fdefault
        FROM rdb$relation_fields r
             JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name
             JOIN rdb$types t ON t.rdb$type=f.rdb$field_type AND t.rdb$field_name='RDB$FIELD_TYPE'
        WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=?
        ORDER BY r.rdb$field_position
        """
        # Query to extract the PK/FK constrained fields of the given table
        keyqry = """
        SELECT se.rdb$field_name AS fname
        FROM rdb$relation_constraints rc
             JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        """
        # Query to extract the details of each UK/FK of the given table
        fkqry = """
        SELECT rc.rdb$constraint_name AS cname,
               cse.rdb$field_name AS fname,
               ix2.rdb$relation_name AS targetrname,
               se.rdb$field_name AS targetfname
        FROM rdb$relation_constraints rc
             JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
             JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
             JOIN rdb$index_segments cse ON cse.rdb$index_name=ix1.rdb$index_name
             JOIN rdb$index_segments se ON se.rdb$index_name=ix2.rdb$index_name AND se.rdb$field_position=cse.rdb$field_position
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        ORDER BY se.rdb$index_name, se.rdb$field_position
        """
        # Heuristic-query to determine the generator associated to a PK field
        genqry = """
        SELECT trigdep.rdb$depended_on_name AS fgenerator
        FROM rdb$dependencies tabdep
             JOIN rdb$dependencies trigdep ON (tabdep.rdb$dependent_name=trigdep.rdb$dependent_name
                                               AND trigdep.rdb$depended_on_type=14
                                               AND trigdep.rdb$dependent_type=2)
             JOIN rdb$triggers trig ON (trig.rdb$trigger_name=tabdep.rdb$dependent_name)
        WHERE tabdep.rdb$depended_on_name=?
          AND tabdep.rdb$depended_on_type=0
          AND trig.rdb$trigger_type=1
          AND tabdep.rdb$field_name=?
          AND (SELECT count(*)
               FROM rdb$dependencies trigdep2
               WHERE trigdep2.rdb$dependent_name = trigdep.rdb$dependent_name) = 2
        """

        tablename = self._denormalize_name(table.name)

        # get primary key fields
        c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
        pkfields = [self._normalize_name(r['fname']) for r in c.fetchall()]

        # get all of the fields for this table
        c = connection.execute(tblqry, [tablename])

        found_table = False
        while True:
            row = c.fetchone()
            if row is None:
                break
            found_table = True

            name = self._normalize_name(row['fname'])
            if include_columns and name not in include_columns:
                continue
            args = [name]

            kw = {}
            # get the data type
            coltype = ischema_names.get(row['ftype'].rstrip())
            if coltype is None:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (str(row['ftype']), name))
                coltype = sqltypes.NULLTYPE
            else:
                coltype = coltype(row)
            args.append(coltype)

            # is it a primary key?
            kw['primary_key'] = name in pkfields

            # is it nullable?
            kw['nullable'] = not bool(row['null_flag'])

            # does it have a default value?
            if row['fdefault'] is not None:
                # the value comes down as "DEFAULT 'value'"
                assert row['fdefault'].upper().startswith('DEFAULT '), row
                defvalue = row['fdefault'][8:]
                args.append(schema.DefaultClause(sql.text(defvalue)))

            col = schema.Column(*args, **kw)
            if kw['primary_key']:
                # if the PK is a single field, try to see if its linked to
                # a sequence thru a trigger
                if len(pkfields)==1:
                    genc = connection.execute(genqry, [tablename, row['fname']])
                    genr = genc.fetchone()
                    if genr is not None:
                        col.sequence = schema.Sequence(self._normalize_name(genr['fgenerator']))

            table.append_column(col)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        # get the foreign keys
        c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
        fks = {}
        while True:
            row = c.fetchone()
            if not row:
                break

            cname = self._normalize_name(row['cname'])
            try:
                fk = fks[cname]
            except KeyError:
                fks[cname] = fk = ([], [])
            rname = self._normalize_name(row['targetrname'])
            schema.Table(rname, table.metadata, autoload=True, autoload_with=connection)
            fname = self._normalize_name(row['fname'])
            refspec = rname + '.' + self._normalize_name(row['targetfname'])
            fk[0].append(fname)
            fk[1].append(refspec)

        for name, value in fks.iteritems():
            table.append_constraint(schema.ForeignKeyConstraint(value[0], value[1], name=name, link_to_name=True))
Example no. 26
File: schema.py Project: oleg84/CDS
 def add_seq(c, tbl):
     c._init_items(
         schema.Sequence(_truncate_name(
             testing.db.dialect, tbl.name + '_' + c.name + '_seq'),
                         optional=True))
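A rough sketch of how a helper like add_seq could be driven (metadata is a placeholder; _truncate_name and testing.db come from the same test library as the snippet and are assumed to be importable there):

# hypothetical: attach an optional Sequence to every primary-key column of every table
for tbl in metadata.sorted_tables:
    for col in tbl.primary_key.columns:
        add_seq(col, tbl)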
Example no. 27
    def reflecttable(self, connection, table, include_columns):
        import sqlalchemy.databases.information_schema as ischema

        # Get base columns
        if table.schema is not None:
            current_schema = table.schema
        else:
            current_schema = self.get_default_schema_name(connection)

        columns = self.uppercase_table(ischema.columns)
        s = sql.select([columns],
                   current_schema
                       and sql.and_(columns.c.table_name==table.name, columns.c.table_schema==current_schema)
                       or columns.c.table_name==table.name,
                   order_by=[columns.c.ordinal_position])

        c = connection.execute(s)
        found_table = False
        while True:
            row = c.fetchone()
            if row is None:
                break
            found_table = True
            (name, type, nullable, charlen, numericprec, numericscale, default) = (
                row[columns.c.column_name],
                row[columns.c.data_type],
                row[columns.c.is_nullable] == 'YES',
                row[columns.c.character_maximum_length],
                row[columns.c.numeric_precision],
                row[columns.c.numeric_scale],
                row[columns.c.column_default]
            )
            if include_columns and name not in include_columns:
                continue

            args = []
            for a in (charlen, numericprec, numericscale):
                if a is not None:
                    args.append(a)
            coltype = self.ischema_names.get(type, None)
            if coltype == MSString and charlen == -1:
                coltype = MSText()
            else:
                if coltype is None:
                    util.warn("Did not recognize type '%s' of column '%s'" %
                              (type, name))
                    coltype = sqltypes.NULLTYPE

                elif coltype in (MSNVarchar, AdoMSNVarchar) and charlen == -1:
                    args[0] = None
                coltype = coltype(*args)
            colargs= []
            if default is not None:
                colargs.append(schema.PassiveDefault(sql.text(default)))

            table.append_column(schema.Column(name, coltype, nullable=nullable, autoincrement=False, *colargs))

        if not found_table:
            raise exceptions.NoSuchTableError(table.name)

        # We also run an sp_columns to check for identity columns:
        cursor = connection.execute("sp_columns @table_name = '%s', @table_owner = '%s'" % (table.name, current_schema))
        ic = None
        while True:
            row = cursor.fetchone()
            if row is None:
                break
            col_name, type_name = row[3], row[5]
            if type_name.endswith("identity"):
                ic = table.c[col_name]
                ic.autoincrement = True
                # set up a pseudo-sequence to represent the identity attribute - we interpret this at table.create() time as the identity attribute
                ic.sequence = schema.Sequence(ic.name + '_identity')
                # MSSQL: only one identity per table allowed
                cursor.close()
                break
        if not ic is None:
            try:
                cursor = connection.execute("select ident_seed(?), ident_incr(?)", table.fullname, table.fullname)
                row = cursor.fetchone()
                cursor.close()
                if not row is None:
                    ic.sequence.start=int(row[0])
                    ic.sequence.increment=int(row[1])
            except:
                # ignoring it, works just like before
                pass

        # Add constraints
        RR = self.uppercase_table(ischema.ref_constraints)    #information_schema.referential_constraints
        TC = self.uppercase_table(ischema.constraints)        #information_schema.table_constraints
        C  = self.uppercase_table(ischema.pg_key_constraints).alias('C') #information_schema.constraint_column_usage: the constrained column
        R  = self.uppercase_table(ischema.pg_key_constraints).alias('R') #information_schema.constraint_column_usage: the referenced column

        # Primary key constraints
        s = sql.select([C.c.column_name, TC.c.constraint_type], sql.and_(TC.c.constraint_name == C.c.constraint_name,
                                                                         C.c.table_name == table.name))
        c = connection.execute(s)
        for row in c:
            if 'PRIMARY' in row[TC.c.constraint_type.name]:
                table.primary_key.add(table.c[row[0]])

        # Foreign key constraints
        s = sql.select([C.c.column_name,
                        R.c.table_schema, R.c.table_name, R.c.column_name,
                        RR.c.constraint_name, RR.c.match_option, RR.c.update_rule, RR.c.delete_rule],
                       sql.and_(C.c.table_name == table.name,
                                C.c.table_schema == (table.schema or current_schema),
                                C.c.constraint_name == RR.c.constraint_name,
                                R.c.constraint_name == RR.c.unique_constraint_name,
                                C.c.ordinal_position == R.c.ordinal_position
                                ),
                       order_by = [RR.c.constraint_name, R.c.ordinal_position])
        rows = connection.execute(s).fetchall()

        def _gen_fkref(table, rschema, rtbl, rcol):
            if table.schema and rschema != table.schema or rschema != current_schema:
                return '.'.join([rschema, rtbl, rcol])
            else:
                return '.'.join([rtbl, rcol])

        # group rows by constraint ID, to handle multi-column FKs
        fknm, scols, rcols = (None, [], [])
        for r in rows:
            scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r

            if table.schema and rschema != table.schema or rschema != current_schema:
                schema.Table(rtbl, table.metadata, schema=rschema, autoload=True, autoload_with=connection)
            else:
                schema.Table(rtbl, table.metadata, autoload=True, autoload_with=connection)
                
            if rfknm != fknm:
                if fknm:
                    table.append_constraint(schema.ForeignKeyConstraint(scols, [_gen_fkref(table,s,t,c) for s,t,c in rcols], fknm))
                fknm, scols, rcols = (rfknm, [], [])
            if (not scol in scols): scols.append(scol)
            if (not (rschema, rtbl, rcol) in rcols): rcols.append((rschema, rtbl, rcol))

        if fknm and scols:
            table.append_constraint(schema.ForeignKeyConstraint(scols, [_gen_fkref(table,s,t,c) for s,t,c in rcols], fknm))
Example no. 28
class AuditTable(db.Model):
    # query against the "auditdb" database session
    __bind_key__ = "auditdb"

    __table_args__ = {
        "implicit_returning": implicit_returning,
    }

    __tablename__ = "audit"
    id = Column(
        types.Integer,
        schema.Sequence("audit_seq_id", optional=True),
        primary_key=True,
    )
    timestamp = Column(types.Unicode(30), default=now, index=True)
    signature = Column(types.Unicode(512), default="")
    action = Column(types.Unicode(30), index=True)
    success = Column(types.Unicode(30), default="0")
    serial = Column(types.Unicode(30), index=True)
    tokentype = Column(types.Unicode(40))
    user = Column(types.Unicode(255), index=True)
    realm = Column(types.Unicode(255), index=True)
    administrator = Column(types.Unicode(255))
    action_detail = Column(types.Unicode(512), default="")
    info = Column(types.Unicode(512), default="")
    linotp_server = Column(types.Unicode(80))
    client = Column(types.Unicode(80))
    log_level = Column(types.Unicode(20), default="INFO", index=True)
    clearance_level = Column(types.Integer, default=0)

    @validates(
        "serial",
        "action",
        "success",
        "tokentype",
        "user",
        "realm",
        "administrator",
        "linotp_server",
        "client",
        "log_level",
    )
    def convert_str(self, key, value):
        """
        Converts the validated column to string on insert
        and truncates the values if necessary
        """
        error_on_truncate = current_app.config["AUDIT_ERROR_ON_TRUNCATION"]
        return self.validate_truncate(
            key,
            str(value or ""),
            warn=True,
            error=error_on_truncate,
        )

    @validates("action_detail", "info")
    def validate_truncate(self, key, value, warn=False, error=False):
        """
        Silently truncates the validated column if value is exceeding column
        length.
        If called manually, can be used to log a warning or throw an exception
        on truncation.
        """
        max_len = getattr(self.__class__, key).prop.columns[0].type.length
        if value and len(value) > max_len:
            if warn:
                log.warning(f"truncating audit data: [audit.{key}] {value}")
            if error:
                raise ValueError(
                    f"Audit data too long, not truncating [audit.{key}] {value}"
                    " because AUDIT_ERROR_ON_TRUNCATION is active.")

            value = value[:max_len - 1] + "…"
        return value
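A standalone illustration of the truncation rule in validate_truncate, using a made-up value and the 512-character limit that mirrors the action_detail and info columns above:

# hypothetical values; mirrors the slice-and-ellipsis logic of validate_truncate
max_len = 512
value = "x" * 600
if value and len(value) > max_len:
    value = value[:max_len - 1] + "…"
assert len(value) == max_len          # 511 characters of payload plus the ellipsis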
Example no. 29
    def reflecttable(self, connection, table, include_columns):
        # This is defined in the function, as it relies on win32com constants,
        # that aren't imported until dbapi method is called
        if not hasattr(self, 'ischema_names'):
            self.ischema_names = {
                const.dbByte: AcBinary,
                const.dbInteger: AcInteger,
                const.dbLong: AcInteger,
                const.dbSingle: AcFloat,
                const.dbDouble: AcFloat,
                const.dbDate: AcDateTime,
                const.dbLongBinary: AcBinary,
                const.dbMemo: AcText,
                const.dbBoolean: AcBoolean,
                const.dbText: AcUnicode,  # All Access strings are unicode
                const.dbCurrency: AcNumeric,
            }

        # A fresh DAO connection is opened for each reflection
        # This is necessary, so we get the latest updates
        dtbs = daoEngine.OpenDatabase(connection.engine.url.database)

        try:
            for tbl in dtbs.TableDefs:
                if tbl.Name.lower() == table.name.lower():
                    break
            else:
                raise exc.NoSuchTableError(table.name)

            for col in tbl.Fields:
                coltype = self.ischema_names[col.Type]
                if col.Type == const.dbText:
                    coltype = coltype(col.Size)

                colargs = \
                {
                    'nullable': not(col.Required or col.Attributes & const.dbAutoIncrField),
                }
                default = col.DefaultValue

                if col.Attributes & const.dbAutoIncrField:
                    colargs['default'] = schema.Sequence(col.Name + '_seq')
                elif default:
                    if col.Type == const.dbBoolean:
                        default = default == 'Yes' and '1' or '0'
                    colargs['server_default'] = schema.DefaultClause(
                        sql.text(default))

                table.append_column(schema.Column(col.Name, coltype,
                                                  **colargs))

                # TBD: check constraints

            # Find primary key columns first
            for idx in tbl.Indexes:
                if idx.Primary:
                    for col in idx.Fields:
                        thecol = table.c[col.Name]
                        table.primary_key.add(thecol)
                        if isinstance(thecol.type, AcInteger) and \
                                not (thecol.default and isinstance(thecol.default.arg, schema.Sequence)):
                            thecol.autoincrement = False

            # Then add other indexes
            for idx in tbl.Indexes:
                if not idx.Primary:
                    if len(idx.Fields) == 1:
                        col = table.c[idx.Fields[0].Name]
                        if not col.primary_key:
                            col.index = True
                            col.unique = idx.Unique
                    else:
                        pass  # TBD: multi-column indexes

            for fk in dtbs.Relations:
                if fk.ForeignTable != table.name:
                    continue
                scols = [c.ForeignName for c in fk.Fields]
                rcols = ['%s.%s' % (fk.Table, c.Name) for c in fk.Fields]
                table.append_constraint(
                    schema.ForeignKeyConstraint(scols,
                                                rcols,
                                                link_to_name=True))

        finally:
            dtbs.Close()
Example no. 30
class Invoice(meta.BaseObject):
    """An invoice."""

    __tablename__ = "invoice"

    id = schema.Column(types.Integer(),
                       schema.Sequence("invoice_id_seq", optional=True),
                       primary_key=True,
                       autoincrement=True)
    _number = schema.Column("number", types.Integer())
    customer_id = schema.Column(types.Integer(),
                                schema.ForeignKey(Customer.id,
                                                  onupdate="CASCADE",
                                                  ondelete="CASCADE"),
                                nullable=False)
    customer = orm.relationship(Customer, backref="invoices")
    sent = schema.Column(types.Date())
    payment_term = schema.Column(types.Integer(), nullable=False, default=30)
    paid = schema.Column(types.Date())
    note = schema.Column(types.UnicodeText())

    @orm.reconstructor
    def _add_acls(self):
        account_id = self.customer.account_id
        self.__acl__ = [(security.Allow, account_id, ("comment", "view"))]
        if not self.sent:
            self.__acl__.append(
                (security.Allow, account_id, ("delete", "edit")))
            if len(self.entries):
                self.__acl__.append((security.Allow, account_id, "send"))
        if self.sent and not self.paid:
            self.__acl__.append((security.Allow, account_id, "mark-paid"))

    @property
    def due(self):
        if self.sent:
            return self.sent + datetime.timedelta(days=self.payment_term)
        return None

    def total(self, type="gross"):
        assert type in ["gross", "net", "vat"]
        gross = sum([entry.total for entry in self.entries])
        if type == "gross":
            return gross

        vat = sum([v[1] for v in self.VAT()])
        if type == "vat":
            return vat
        return gross + vat

    def VAT(self):
        totals = {}
        for entry in self.entries:
            if not entry.vat:
                continue
            current = entry.total
            totals[entry.vat] = totals.get(entry.vat, 0) + current
        for (vat, total) in totals.items():
            totals[vat] = (totals[vat] * vat) / 100
        return sorted(totals.items())

    @synonym_for("_number")
    @property
    def number(self):
        if not self._number:
            return None
        return "%s.%04d" % (self.customer.invoice_code, self._number)

    def state(self):
        if not self.sent:
            return "unsend"
        elif self.paid:
            return "paid"
        today = datetime.date.today()
        due = self.sent + datetime.timedelta(days=self.payment_term)
        if due < today:
            return "overdue"
        else:
            return "pending"

    def overdue(self):
        if self.paid or not self.sent:
            return None
        today = datetime.date.today()
        due = self.sent + datetime.timedelta(days=self.payment_term)
        if due >= today:
            return None
        return (today - due).days

    def send(self):
        assert self.sent is None
        self.sent = datetime.datetime.now()
        self._number = self.customer.account.newInvoiceNumber()
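To trace the arithmetic in total() and VAT() with invented figures, two entries of 100 and 50, both at 21% VAT, can be walked through the same steps the methods perform (the labels follow the method's own branches, where "gross" is the plain sum of entry totals and "net" adds the VAT on top):

# invented (entry.total, entry.vat) pairs
entries = [(100, 21), (50, 21)]

gross = sum(total for total, vat in entries)                    # 150   -> Invoice.total("gross")

totals = {}                                                     # mirrors Invoice.VAT()
for total, vat in entries:
    totals[vat] = totals.get(vat, 0) + total
vat_amount = sum((t * vat) / 100 for vat, t in totals.items())  # 31.5  -> Invoice.total("vat")

print(gross, vat_amount, gross + vat_amount)                    # 150 31.5 181.5 -> total("net")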