# Example 1
 def primary_key_constraint(self, name, table_name, cols, schema=None):
     """Return a named PrimaryKeyConstraint over *cols*.

     A throwaway Table is built in this object's MetaData so the
     constraint can reference real Column objects.
     """
     meta = self.metadata()
     table = sa_schema.Table(
         table_name,
         meta,
         *(sa_schema.Column(colname, NULLTYPE) for colname in cols),
         schema=schema)
     constraint = sa_schema.PrimaryKeyConstraint(
         *(table.c[colname] for colname in cols), name=name)
     table.append_constraint(constraint)
     return constraint
# Example 2
class Price(Base):
    """Daily aggregated auction-house price record for one item on one realm.

    The composite primary key is (realm_id, item_id, day), declared both via
    ``__table_args__`` and the ``primary_key=True`` column flags.
    """
    __tablename__ = "prices"
    __table_args__ = (schema.PrimaryKeyConstraint('realm_id', 'item_id',
                                                  'day'), )
    #(Index("price_finder", 'day', 'realm_id', 'item_id', 'faction'),)

    #id = Column(Integer, primary_key=True)

    # Day this aggregate covers; defaults to the DB's current time.
    day = Column(Date, default=func.now(), primary_key=True)

    realm_id = Column(Integer, ForeignKey("realms.id"), primary_key=True)
    realm = relationship("Realm")

    # NOTE(review): the item FK/relationship is commented out, so item_id is
    # a plain integer column here.
    item_id = Column(Integer, primary_key=True)  #, ForeignKey("item.id"))
    #item = relationship("Item", backref=backref("prices", order_by=-day, primaryjoin = item_id == Item.id))

    buyout = Column(BigInteger)  # Average buyout
    bid = Column(BigInteger)  # Average bid
    quantity = Column(Integer)  # Number of items seen this day
    average_counter = Column(Integer)

    def __init__(self, day, realm, item, buyout, bid, quantity):
        """Build a record; *item* is the raw item id, *realm* a Realm object."""
        self.day = day
        self.realm = realm
        self.item_id = item
        self.buyout = buyout
        self.bid = bid
        self.quantity = quantity
        # Running counter used when averaging in further observations.
        self.average_counter = 0
# Example 3
 def _primary_key_constraint(self, name, table_name, cols, schema=None):
     """Build a PrimaryKeyConstraint named *name* covering *cols*.

     The columns are materialized on a stub Table in a fresh MetaData so
     the constraint is attached to real Column objects.
     """
     meta = sa_schema.MetaData()
     pk_columns = [sa_schema.Column(colname, NULLTYPE) for colname in cols]
     stub = sa_schema.Table(table_name, meta, *pk_columns, schema=schema)
     constraint = sa_schema.PrimaryKeyConstraint(*pk_columns, name=name)
     stub.append_constraint(constraint)
     return constraint
# Example 4
 def primary_key_constraint(self,
                            name: Optional[str],
                            table_name: str,
                            cols: Sequence[str],
                            schema: Optional[str] = None,
                            **dialect_kw) -> "PrimaryKeyConstraint":
     """Return a PrimaryKeyConstraint for *cols* of *table_name*.

     A stub Table is created in this object's MetaData; the constraint
     references that table's columns. Extra ``dialect_kw`` are passed
     through to the constraint.
     """
     meta = self.metadata()
     stub_columns = [sa_schema.Column(colname, NULLTYPE) for colname in cols]
     stub = sa_schema.Table(table_name, meta, *stub_columns, schema=schema)
     return sa_schema.PrimaryKeyConstraint(
         *(stub.c[colname] for colname in cols), name=name, **dialect_kw)
# Example 5
    def reflecttable(self, table, include_columns, exclude_columns=()):
        """Given a Table object, load its internal constructs based on
        introspection.

        This is the underlying method used by most dialects to produce
        table reflection.  Direct usage is like::

            from sqlalchemy import create_engine, MetaData, Table
            from sqlalchemy.engine import reflection

            engine = create_engine('...')
            meta = MetaData()
            user_table = Table('user', meta)
            insp = Inspector.from_engine(engine)
            insp.reflecttable(user_table, None)

        :param table: a :class:`~sqlalchemy.schema.Table` instance.
        :param include_columns: a list of string column names to include
          in the reflection process.  If ``None``, all columns are reflected.
        :param exclude_columns: a sequence of column names to skip during
          reflection; defaults to an empty tuple.
        :raises sqlalchemy.exc.NoSuchTableError: if no columns are found
          for the table.
        """
        dialect = self.bind.dialect

        # table attributes we might need.
        reflection_options = {}

        schema = table.schema
        table_name = table.name

        # apply table options
        tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
        if tbl_opts:
            table.kwargs.update(tbl_opts)

        # table.kwargs will need to be passed to each reflection method.  Make
        # sure keywords are strings.
        tblkw = table.kwargs.copy()
        for (k, v) in list(tblkw.items()):
            del tblkw[k]
            tblkw[str(k)] = v

        # Decode byte-string names using the dialect's encoding.  The original
        # code tested ``isinstance(..., str)``, a Python-2 idiom: on Python 3
        # ``str`` has no ``.decode`` and that branch raised AttributeError.
        # ``bytes`` is correct on Python 3 and identical on Python 2, where
        # ``bytes is str``.
        if isinstance(schema, bytes):
            schema = schema.decode(dialect.encoding)
        if isinstance(table_name, bytes):
            table_name = table_name.decode(dialect.encoding)

        # columns
        found_table = False
        # maps the database-reported column name to the Column we built,
        # so constraints can be resolved even if names are remapped.
        cols_by_orig_name = {}

        for col_d in self.get_columns(table_name, schema, **tblkw):
            found_table = True
            orig_name = col_d['name']

            name = col_d['name']
            if include_columns and name not in include_columns:
                continue
            if exclude_columns and name in exclude_columns:
                continue

            coltype = col_d['type']
            col_kw = {
                'nullable': col_d['nullable'],
            }
            # optional per-dialect column attributes
            for k in ('autoincrement', 'quote', 'info', 'key'):
                if k in col_d:
                    col_kw[k] = col_d[k]

            colargs = []
            if col_d.get('default') is not None:
                # the "default" value is assumed to be a literal SQL
                # expression, so is wrapped in text() so that no quoting
                # occurs on re-issuance.
                colargs.append(
                    sa_schema.DefaultClause(sql.text(col_d['default']),
                                            _reflected=True))

            if 'sequence' in col_d:
                # TODO: mssql and sybase are using this.
                seq = col_d['sequence']
                sequence = sa_schema.Sequence(seq['name'], 1, 1)
                if 'start' in seq:
                    sequence.start = seq['start']
                if 'increment' in seq:
                    sequence.increment = seq['increment']
                colargs.append(sequence)

            cols_by_orig_name[orig_name] = col = \
                        sa_schema.Column(name, coltype, *colargs, **col_kw)

            table.append_column(col)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        # Primary keys
        pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
        if pk_cons:
            pk_cols = [
                cols_by_orig_name[pk] for pk in pk_cons['constrained_columns']
                if pk in cols_by_orig_name and pk not in exclude_columns
            ]
            # retain any in-Python-only primary key columns that were excluded
            pk_cols += [
                pk for pk in table.primary_key if pk.key in exclude_columns
            ]
            primary_key_constraint = sa_schema.PrimaryKeyConstraint(
                name=pk_cons.get('name'), *pk_cols)

            table.append_constraint(primary_key_constraint)

        # Foreign keys
        fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
        for fkey_d in fkeys:
            conname = fkey_d['name']
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            constrained_columns = [
                cols_by_orig_name[c].key if c in cols_by_orig_name else c
                for c in fkey_d['constrained_columns']
            ]
            if exclude_columns and set(constrained_columns).intersection(
                    exclude_columns):
                continue
            referred_schema = fkey_d['referred_schema']
            referred_table = fkey_d['referred_table']
            referred_columns = fkey_d['referred_columns']
            refspec = []
            # reflect the referred table so the FK targets resolve, then
            # build dotted column specs (schema-qualified when available)
            if referred_schema is not None:
                sa_schema.Table(referred_table,
                                table.metadata,
                                autoload=True,
                                schema=referred_schema,
                                autoload_with=self.bind,
                                **reflection_options)
                for column in referred_columns:
                    refspec.append(".".join(
                        [referred_schema, referred_table, column]))
            else:
                sa_schema.Table(referred_table,
                                table.metadata,
                                autoload=True,
                                autoload_with=self.bind,
                                **reflection_options)
                for column in referred_columns:
                    refspec.append(".".join([referred_table, column]))
            if 'options' in fkey_d:
                options = fkey_d['options']
            else:
                options = {}
            table.append_constraint(
                sa_schema.ForeignKeyConstraint(constrained_columns,
                                               refspec,
                                               conname,
                                               link_to_name=True,
                                               **options))
        # Indexes
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'unknown type')
            # skip indexes that cover columns omitted from reflection
            if include_columns and \
                            not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s KEY for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            # look for columns by orig name in cols_by_orig_name,
            # but support columns that are in-Python only as fallback
            sa_schema.Index(
                name, *[
                    cols_by_orig_name[c]
                    if c in cols_by_orig_name else table.c[c] for c in columns
                ], **dict(unique=unique))
# Example 6
    __abstract__ = True

    date_created = schema.Column(types.DateTime(timezone=True),
                                 default=get_utc_now)


# Many-to-many association table linking data_collections to content_blocks.
# Both FKs cascade on delete; the pair forms the composite primary key.
collection_to_content_block = schema.Table(
    'collection_to_content_block', Base.metadata,
    schema.Column('collection_id', types.Integer,
                  schema.ForeignKey('data_collections.id',
                                    ondelete='CASCADE')),
    schema.Column('content_block_id',
                  types.Integer,
                  schema.ForeignKey('content_blocks.id', ondelete='CASCADE'),
                  index=True),
    schema.PrimaryKeyConstraint('collection_id', 'content_block_id'))


class ContentBlock(AbstractModel):

    __tablename__ = 'content_blocks'

    id = schema.Column(types.Integer, primary_key=True)
    message = schema.Column(types.Text, nullable=True)

    timestamp_label = schema.Column(types.DateTime(timezone=True),
                                    default=get_utc_now,
                                    index=True)

    inbox_message_id = schema.Column(types.Integer,
                                     schema.ForeignKey('inbox_messages.id',
# Example 7
    def createDB(self):
        """
        Step through and create the DB structure, relationships and constraints
        **Note that order matters here, have to define a Table before you can link to it**

        TODO this can/should all be redone using the new syntax and relations
        see: http://docs.sqlalchemy.org/en/rel_0_7/orm/relationships.html# for
        some examples.

        NOTE: if one stops using sqlite then change file_id, logging_id and file_logging_id
              to BigIntegers (sqlite doesn't know BigInteger)
        """

        self.init_db(self.user, self.password, self.db_name)
        metadata = self.metadata

        data_table = schema.Table('mission',
                                  metadata,
                                  schema.Column('mission_id',
                                                types.Integer,
                                                autoincrement=True,
                                                primary_key=True,
                                                nullable=False),
                                  schema.Column('mission_name',
                                                types.String(20),
                                                nullable=False,
                                                unique=True),
                                  schema.Column(
                                      'rootdir',
                                      types.String(150),
                                      nullable=False,
                                  ),
                                  schema.Column(
                                      'incoming_dir',
                                      types.String(150),
                                      nullable=False,
                                  ),
                                  schema.Column(
                                      'codedir',
                                      types.String(150),
                                      nullable=True,
                                  ),
                                  schema.Column(
                                      'inspectordir',
                                      types.String(150),
                                      nullable=True,
                                  ),
                                  schema.Column(
                                      'errordir',
                                      types.String(150),
                                      nullable=True,
                                  ),
                                  extend_existing=True)

        data_table = schema.Table(
            'satellite',
            metadata,
            schema.Column('satellite_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False),
            schema.Column('satellite_name', types.String(20),
                          nullable=False),  # hmm long enough?
            schema.Column(
                'mission_id',
                types.Integer,
                schema.ForeignKey('mission.mission_id'),
                nullable=False,
            ),
            schema.UniqueConstraint('satellite_name',
                                    'mission_id',
                                    name='unique_pairs_satellite'),
            extend_existing=True)

        data_table = schema.Table(
            'instrument',
            metadata,
            schema.Column('instrument_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False),
            schema.Column('instrument_name', types.String(20), nullable=False),
            # hmm long enough?
            schema.Column(
                'satellite_id',
                types.Integer,
                schema.ForeignKey('satellite.satellite_id'),
                nullable=False,
            ),
            schema.UniqueConstraint('instrument_name',
                                    'satellite_id',
                                    name='unique_pairs_instrument'),
            extend_existing=True)

        data_table = schema.Table(
            'product',
            metadata,
            schema.Column('product_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False,
                          index=True),
            schema.Column('product_name',
                          types.String(100),
                          nullable=False,
                          index=True),
            # hmm long enough?
            schema.Column(
                'instrument_id',
                types.Integer,
                schema.ForeignKey('instrument.instrument_id'),
                nullable=False,
            ),
            schema.Column('relative_path', types.String(100),
                          nullable=False),  # hmm long enough?
            schema.Column('level', types.Float, nullable=False),
            schema.Column('format', types.Text,
                          nullable=False),  # hmm long enough?
            schema.Column('product_description', types.Text,
                          nullable=True),  # hmm long enough?
            schema.UniqueConstraint('product_name',
                                    'instrument_id',
                                    'relative_path',
                                    name='unique_triplet_product'),
            extend_existing=True)

        data_table = schema.Table(
            'instrumentproductlink',
            metadata,
            schema.Column('instrument_id',
                          types.Integer,
                          schema.ForeignKey('instrument.instrument_id'),
                          nullable=False),
            schema.Column('product_id',
                          types.Integer,
                          schema.ForeignKey('product.product_id'),
                          nullable=False),
            schema.PrimaryKeyConstraint('instrument_id', 'product_id'),
            extend_existing=True)

        data_table = schema.Table(
            'process',
            metadata,
            schema.Column('process_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False,
                          index=True),
            schema.Column('process_name', types.String(50),
                          nullable=False),  # hmm long enough?
            schema.Column('output_product',
                          types.Integer,
                          schema.ForeignKey('product.product_id'),
                          nullable=True,
                          index=True),
            schema.Column('output_timebase',
                          types.String(10),
                          nullable=True,
                          index=True),
            schema.Column('extra_params', types.Text, nullable=True),
            schema.UniqueConstraint('process_name', 'output_product'),
            extend_existing=True)

        data_table = schema.Table(
            'productprocesslink',
            metadata,
            schema.Column('process_id',
                          types.Integer,
                          schema.ForeignKey('process.process_id'),
                          nullable=False),
            schema.Column('input_product_id',
                          types.Integer,
                          schema.ForeignKey('product.product_id'),
                          nullable=False),
            schema.Column('optional', types.Boolean, nullable=False),
            #                                  schema.Column('yesterday', types.Integer, nullable=False),
            #                                  schema.Column('tomorrow', types.Integer, nullable=False),
            schema.PrimaryKeyConstraint('process_id', 'input_product_id'),
            extend_existing=True)

        data_table = schema.Table(
            'file',
            metadata,
            # this was a bigint, sqlalchemy doesn't seem to like this... think here
            schema.Column('file_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False,
                          index=True),
            schema.Column('filename',
                          types.String(250),
                          nullable=False,
                          unique=True,
                          index=True),
            schema.Column('utc_file_date',
                          types.Date,
                          nullable=True,
                          index=True),
            schema.Column('utc_start_time',
                          types.DateTime,
                          nullable=True,
                          index=True),
            schema.Column('utc_stop_time',
                          types.DateTime,
                          nullable=True,
                          index=True),
            schema.Column('data_level',
                          types.Float,
                          nullable=False,
                          index=True),
            schema.Column('interface_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('quality_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('revision_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('verbose_provenance', types.Text, nullable=True),
            schema.Column('check_date', types.DateTime, nullable=True),
            schema.Column('quality_comment', types.Text, nullable=True),
            schema.Column('caveats', types.Text, nullable=True),
            schema.Column('file_create_date', types.DateTime, nullable=False),
            schema.Column('met_start_time', types.Float, nullable=True),
            schema.Column('met_stop_time', types.Float, nullable=True),
            schema.Column('exists_on_disk', types.Boolean, nullable=False),
            schema.Column('quality_checked',
                          types.Boolean,
                          nullable=True,
                          default=False),
            schema.Column('product_id',
                          types.Integer,
                          schema.ForeignKey('product.product_id'),
                          nullable=False),
            schema.Column('shasum', types.String(40), nullable=True),
            schema.Column('process_keywords', types.Text, nullable=True),
            schema.CheckConstraint(
                'utc_stop_time is not NULL OR met_stop_time is not NULL'),
            schema.CheckConstraint(
                'utc_start_time is not NULL OR met_start_time is not NULL'),
            schema.CheckConstraint(
                'met_start_time <= met_stop_time'),  # in case of one entry
            schema.CheckConstraint(
                'utc_start_time <= utc_stop_time'),  # in case of one entry
            schema.CheckConstraint('interface_version >= 1'),
            schema.UniqueConstraint('utc_file_date',
                                    'product_id',
                                    'interface_version',
                                    'quality_comment',
                                    'revision_version',
                                    name='Unique file tuple'),
            extend_existing=True)
        schema.Index('ix_file_big',
                     data_table.columns['filename'],
                     data_table.columns['utc_file_date'],
                     data_table.columns['utc_start_time'],
                     data_table.columns['utc_stop_time'],
                     unique=True)

        data_table = schema.Table(
            'unixtime',
            metadata,
            schema.Column('file_id',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          primary_key=True,
                          index=True),
            schema.Column('unix_start', types.Integer, index=True),
            schema.Column('unix_stop', types.Integer, index=True),
            schema.CheckConstraint('unix_start <= unix_stop'),
        )

        data_table = schema.Table(
            'filefilelink',
            metadata,
            schema.Column('source_file',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          nullable=False,
                          index=True),
            schema.Column('resulting_file',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          nullable=False,
                          index=True),
            schema.PrimaryKeyConstraint('source_file', 'resulting_file'),
            schema.CheckConstraint('source_file <> resulting_file'),
            # TODO this is supposed to be more general than !=
            extend_existing=True)

        data_table = schema.Table(
            'code',
            metadata,
            schema.Column('code_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False,
                          index=True),
            schema.Column('filename',
                          types.String(250),
                          nullable=False,
                          unique=False),
            schema.Column('relative_path', types.String(100), nullable=False),
            schema.Column('code_start_date', types.Date, nullable=False),
            schema.Column('code_stop_date', types.Date, nullable=False),
            schema.Column('code_description', types.Text, nullable=False),
            schema.Column('process_id',
                          types.Integer,
                          schema.ForeignKey('process.process_id'),
                          nullable=False,
                          index=True),
            schema.Column('interface_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('quality_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('revision_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('output_interface_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('active_code',
                          types.Boolean,
                          nullable=False,
                          default=False),
            schema.Column('date_written', types.Date, nullable=False),
            schema.Column('shasum', types.String(40), nullable=True),
            schema.Column('newest_version', types.Boolean, nullable=False),
            schema.Column('arguments', types.Text, nullable=True),
            schema.Column('ram', types.Float,
                          nullable=True),  # amanount of ram used in Gigs
            schema.Column('cpu', types.SmallInteger,
                          nullable=True),  # number of cpus used
            schema.CheckConstraint('code_start_date <= code_stop_date'),
            schema.CheckConstraint('interface_version >= 1'),
            schema.CheckConstraint('output_interface_version >= 1'),
            extend_existing=True)

        data_table = schema.Table(
            'processqueue',
            metadata,
            schema.Column('file_id',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          primary_key=True,
                          nullable=False,
                          unique=True,
                          index=True),
            schema.Column('version_bump', types.SmallInteger, nullable=True),
            schema.Column('instrument_id',
                          types.Integer,
                          schema.ForeignKey('instrument.instrument_id'),
                          nullable=False),
            schema.CheckConstraint('version_bump is NULL or version_bump < 3'),
            extend_existing=True)

        data_table = schema.Table(
            'filecodelink',
            metadata,
            schema.Column('resulting_file',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          nullable=False),
            schema.Column('source_code',
                          types.Integer,
                          schema.ForeignKey('code.code_id'),
                          nullable=False),
            schema.PrimaryKeyConstraint('resulting_file', 'source_code'),
            extend_existing=True)

        data_table = schema.Table('release',
                                  metadata,
                                  schema.Column(
                                      'file_id',
                                      types.Integer,
                                      schema.ForeignKey('file.file_id'),
                                      nullable=False,
                                  ),
                                  schema.Column('release_num',
                                                types.String(20),
                                                nullable=False),
                                  schema.PrimaryKeyConstraint(
                                      'file_id', 'release_num'),
                                  extend_existing=True)

        data_table = schema.Table(
            'processpidlink', metadata,
            schema.Column('ppl_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False),
            schema.Column('pid', types.Integer, nullable=True),
            schema.Column('hostname', types.String(100), nullable=True),
            schema.Column('process_id',
                          types.Integer,
                          schema.ForeignKey('process.process_id'),
                          nullable=True),
            schema.Column('currentlyprocessing',
                          types.Boolean,
                          nullable=True,
                          default='f'),
            schema.Column('start_time',
                          types.DateTime,
                          nullable=True,
                          default=func.now()),
            schema.Column('end_time',
                          types.DateTime,
                          nullable=True,
                          default=func.now()))

        data_table = schema.Table(
            'logging',
            metadata,
            schema.Column('logging_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False),
            schema.Column('currently_processing',
                          types.Boolean,
                          nullable=False,
                          default=False),
            schema.Column('pid', types.Integer, nullable=True),
            schema.Column('processing_start_time',
                          types.DateTime,
                          nullable=False),
            # might have to be a TIMESTAMP
            schema.Column('processing_end_time', types.DateTime,
                          nullable=True),
            schema.Column('comment', types.Text, nullable=True),
            schema.Column('mission_id',
                          types.Integer,
                          schema.ForeignKey('mission.mission_id'),
                          nullable=False),
            schema.Column('user', types.String(30), nullable=False),
            schema.Column('hostname', types.String(100), nullable=False),
            # schema.PrimaryKeyConstraint('logging_id'),
            schema.CheckConstraint(
                'processing_start_time < processing_end_time'),
            extend_existing=True)

        # Association table: links a processing run ('logging') to each file
        # it touched and the code version ('code') that produced it.
        data_table = schema.Table(
            'logging_file',
            metadata,
            # Surrogate key for one (run, file, code) association row.
            schema.Column('logging_file_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False),
            # The processing run this row belongs to.
            schema.Column('logging_id',
                          types.Integer,
                          schema.ForeignKey('logging.logging_id'),
                          nullable=False),
            # The file involved in that run.
            schema.Column('file_id',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          nullable=False),
            # The code (version) that processed/produced the file.
            schema.Column('code_id',
                          types.Integer,
                          schema.ForeignKey('code.code_id'),
                          nullable=False),
            schema.Column('comments', types.Text, nullable=True),
            # schema.PrimaryKeyConstraint('logging_file_id'),
            extend_existing=True)

        # Registry of "inspector" scripts: on-disk code files identified by a
        # three-part version plus an output interface version, tied to the
        # product they inspect.
        data_table = schema.Table(
            'inspector',
            metadata,
            schema.Column('inspector_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False,
                          index=True),
            # Filename alone is not unique; uniqueness presumably comes from
            # (filename, version) at the application level -- confirm.
            schema.Column('filename',
                          types.String(250),
                          nullable=False,
                          unique=False),
            # Path of the file relative to some code root (see filename).
            schema.Column('relative_path', types.String(250), nullable=False),
            schema.Column('description', types.Text, nullable=False),
            # interface.quality.revision versioning of the inspector itself.
            schema.Column('interface_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('quality_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('revision_version',
                          types.SmallInteger,
                          nullable=False),
            # Version of the interface this inspector's output conforms to.
            schema.Column('output_interface_version',
                          types.SmallInteger,
                          nullable=False),
            # Whether this inspector is currently enabled; indexed because it
            # is a common filter.
            schema.Column('active_code',
                          types.Boolean,
                          nullable=False,
                          default=False,
                          index=True),
            schema.Column('date_written', types.Date, nullable=False),
            # SHA-1 hex digest of the file contents (40 chars), if computed.
            schema.Column('shasum', types.String(40), nullable=True),
            # Flags the latest version among rows sharing a filename.
            schema.Column('newest_version',
                          types.Boolean,
                          nullable=False,
                          index=True),
            # Extra command-line arguments passed to the inspector, if any.
            schema.Column('arguments', types.Text, nullable=True),
            # Product this inspector applies to.
            schema.Column('product',
                          types.Integer,
                          schema.ForeignKey('product.product_id'),
                          nullable=False),
            # Versions are 1-based by convention here.
            schema.CheckConstraint('interface_version >= 1'),
            schema.CheckConstraint('output_interface_version >= 1'),
            extend_existing=True)

        # TODO move this out so that the user chooses the db type
        # engine = create_engine('postgres:///' + self.filename, echo=False)
        # metadata.bind = engine

        # Emit CREATE TABLE for every table registered on `metadata`,
        # skipping any that already exist in the target database.
        metadata.create_all(checkfirst=True)
# Exemplo n.º 8 (Example no. 8)
# 0
    def reflecttable(self, table, include_columns):
        """Populate ``table`` in place from live database metadata.

        Reflects columns, the primary key, foreign keys and indexes using
        the dialect-level ``get_*`` inspection methods on this object.  When
        ``include_columns`` is a non-empty collection, only those columns
        are reflected, and keys/indexes that cover omitted columns are
        skipped (with a warning for indexes).

        Raises ``exc.NoSuchTableError`` when no columns are found.

        NOTE(review): this is Py2K-era code (see the ``# Py2K`` block); it
        is not valid as-is on Python 3 -- confirm the build pipeline strips
        those blocks before assuming Py3 compatibility.
        """

        dialect = self.conn.dialect

        # MySQL dialect does this.  Applicable with other dialects?
        if hasattr(dialect, '_connection_charset') \
                                        and hasattr(dialect, '_adjust_casing'):
            charset = dialect._connection_charset
            dialect._adjust_casing(table)

        # table attributes we might need.
        # Collect only the dialect-recognized reflection options actually
        # present in table.kwargs; reused below when reflecting FK targets.
        reflection_options = dict(
            (k, table.kwargs.get(k)) for k in dialect.reflection_options if k in table.kwargs)

        schema = table.schema
        table_name = table.name

        # apply table options
        tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
        if tbl_opts:
            table.kwargs.update(tbl_opts)

        # table.kwargs will need to be passed to each reflection method.  Make
        # sure keywords are strings.
        # NOTE(review): deleting from tblkw while iterating .items() is safe
        # on Py2 (items() returns a list) but raises RuntimeError on Py3.
        tblkw = table.kwargs.copy()
        for (k, v) in tblkw.items():
            del tblkw[k]
            tblkw[str(k)] = v

        # Py2K
        # Normalize byte-string identifiers to unicode (Py2-only branch).
        if isinstance(schema, str):
            schema = schema.decode(dialect.encoding)
        if isinstance(table_name, str):
            table_name = table_name.decode(dialect.encoding)
        # end Py2K

        # columns
        # Any column row at all proves the table exists.
        found_table = False
        for col_d in self.get_columns(table_name, schema, **tblkw):
            found_table = True
            name = col_d['name']
            # Honor the caller's column filter, if given.
            if include_columns and name not in include_columns:
                continue

            coltype = col_d['type']
            col_kw = {
                'nullable':col_d['nullable'],
            }
            # Optional per-column attributes some dialects report.
            if 'autoincrement' in col_d:
                col_kw['autoincrement'] = col_d['autoincrement']
            if 'quote' in col_d:
                col_kw['quote'] = col_d['quote']

            colargs = []
            if col_d.get('default') is not None:
                # the "default" value is assumed to be a literal SQL expression,
                # so is wrapped in text() so that no quoting occurs on re-issuance.
                colargs.append(sa_schema.DefaultClause(sql.text(col_d['default'])))

            if 'sequence' in col_d:
                # TODO: mssql, maxdb and sybase are using this.
                seq = col_d['sequence']
                # Build with defaults (start=1, increment=1), then override
                # with whatever the dialect reported.
                sequence = sa_schema.Sequence(seq['name'], 1, 1)
                if 'start' in seq:
                    sequence.start = seq['start']
                if 'increment' in seq:
                    sequence.increment = seq['increment']
                colargs.append(sequence)

            col = sa_schema.Column(name, coltype, *colargs, **col_kw)
            table.append_column(col)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        # Primary keys
        # Only PK members that survived the include_columns filter are used.
        pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
        if pk_cons:
            primary_key_constraint = sa_schema.PrimaryKeyConstraint(name=pk_cons.get('name'), 
                *[table.c[pk] for pk in pk_cons['constrained_columns']
                if pk in table.c]
            )

            table.append_constraint(primary_key_constraint)

        # Foreign keys
        fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
        for fkey_d in fkeys:
            conname = fkey_d['name']
            constrained_columns = fkey_d['constrained_columns']
            referred_schema = fkey_d['referred_schema']
            referred_table = fkey_d['referred_table']
            referred_columns = fkey_d['referred_columns']
            # Build "schema.table.column" (or "table.column") target specs;
            # the referred table is autoloaded into the same MetaData first so
            # the FK can resolve.
            refspec = []
            if referred_schema is not None:
                sa_schema.Table(referred_table, table.metadata,
                                autoload=True, schema=referred_schema,
                                autoload_with=self.conn,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join(
                        [referred_schema, referred_table, column]))
            else:
                sa_schema.Table(referred_table, table.metadata, autoload=True,
                                autoload_with=self.conn,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join([referred_table, column]))
            # link_to_name=True: refspec uses DB column names, not keys.
            table.append_constraint(
                sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
                                               conname, link_to_name=True))
        # Indexes 
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'unknown type')
            # Skip (and warn about) indexes touching filtered-out columns.
            if include_columns and \
                            not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s KEY for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            sa_schema.Index(name, *[table.columns[c] for c in columns], 
                         **dict(unique=unique))