Example #1
def _make_index(params, conn_table):
    return sa_schema.Index(
        params['name'],
        *[conn_table.c[cname] for cname in params['column_names']],
        unique=params['unique'])
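A minimal sketch of driving this helper (assumed setup; the helper above must be in scope, and the params dict mirrors one entry of what Inspector.get_indexes() returns):

import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
meta = sa.MetaData()
conn_table = sa.Table('user', meta,
                      sa.Column('id', sa.Integer, primary_key=True),
                      sa.Column('email', sa.String(50)))
# same shape as one entry of Inspector.get_indexes('user')
params = {'name': 'ix_user_email', 'column_names': ['email'], 'unique': True}
index = _make_index(params, conn_table)
meta.create_all(engine)  # emits CREATE TABLE plus CREATE UNIQUE INDEX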
Example #2
def register_schema(providers: List[str],
                    db_name: str,
                    schema_base: DeclarativeMeta,
                    entity_type: str = 'stock'):
    """
    function for register schema,please declare them before register

    :param providers: the supported providers for the schema
    :type providers:
    :param db_name: database name for the schema
    :type db_name:
    :param schema_base:
    :type schema_base:
    :param entity_type: the schema related entity_type
    :type entity_type:
    :return:
    :rtype:
    """
    schemas = []
    for item in schema_base._decl_class_registry.items():
        cls = item[1]
        if type(cls) == DeclarativeMeta:
            if dbname_map_schemas.get(db_name):
                schemas = dbname_map_schemas[db_name]
            global_schemas.append(cls)
            add_to_map_list(the_map=entity_map_schemas,
                            key=entity_type,
                            value=cls)
            schemas.append(cls)

    dbname_map_schemas[db_name] = schemas

    for provider in providers:
        # track the provider in global_providers
        if provider not in global_providers:
            global_providers.append(provider)

        if not provider_map_dbnames.get(provider):
            provider_map_dbnames[provider] = []
        provider_map_dbnames[provider].append(db_name)
        dbname_map_base[db_name] = schema_base

        # create the db & table
        engine = get_db_engine(provider, db_name=db_name)
        schema_base.metadata.create_all(engine)

        session_fac = get_db_session_factory(provider, db_name=db_name)
        session_fac.configure(bind=engine)

    for provider in providers:
        engine = get_db_engine(provider, db_name=db_name)

        # create indexes for 'timestamp', 'entity_id', 'code', 'report_period',
        # 'created_timestamp' and 'updated_timestamp'
        for table_name, table in schema_base.metadata.tables.items():
            index_list = []
            with engine.connect() as con:
                rs = con.execute("PRAGMA INDEX_LIST('{}')".format(table_name))
                for row in rs:
                    index_list.append(row[1])

            logger.debug('engine:{},table:{},index:{}'.format(
                engine, table_name, index_list))

            for col in [
                    'timestamp', 'entity_id', 'code', 'report_period',
                    'created_timestamp', 'updated_timestamp'
            ]:
                if col in table.c:
                    column = table.c[col]
                    index = schema.Index('{}_{}_index'.format(table_name, col),
                                         column)
                    if index.name not in index_list:
                        index.create(engine)
            for cols in [('timestamp', 'entity_id'), ('timestamp', 'code')]:
                if (cols[0] in table.c) and (cols[1] in table.c):
                    column0 = table.c[cols[0]]
                    column1 = table.c[cols[1]]
                    index = schema.Index(
                        '{}_{}_{}_index'.format(table_name, cols[0], cols[1]),
                        column0, column1)
                    if index.name not in index_list:
                        index.create(engine)
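The existence check above leans on SQLite's PRAGMA index_list. A standalone sketch of that pattern (assumed in-memory SQLite engine; names are illustrative):

import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
meta = sa.MetaData()
quote = sa.Table('quote', meta,
                 sa.Column('id', sa.Integer, primary_key=True),
                 sa.Column('timestamp', sa.DateTime))
meta.create_all(engine)
sa.Index('quote_timestamp_index', quote.c.timestamp).create(engine)

with engine.connect() as con:
    rs = con.execute(sa.text("PRAGMA index_list('quote')"))
    existing = [row[1] for row in rs]  # second field is the index name
assert 'quote_timestamp_index' in existing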
Example #3
def _make_index(params, conn_table):
    # TODO: add .info such as 'duplicates_constraint'
    return sa_schema.Index(
        params['name'],
        *[conn_table.c[cname] for cname in params['column_names']],
        unique=params['unique'])
Example #4
    def reflecttable(self, connection, table, include_columns):
        preparer = self.identifier_preparer
        if table.schema is not None:
            schema_where_clause = "n.nspname = :schema"
            schemaname = table.schema
            if isinstance(schemaname, str):
                schemaname = schemaname.decode(self.encoding)
        else:
            schema_where_clause = "pg_catalog.pg_table_is_visible(c.oid)"
            schemaname = None

        SQL_COLS = """
            SELECT a.attname,
              pg_catalog.format_type(a.atttypid, a.atttypmod),
              (SELECT substring(d.adsrc for 128) FROM pg_catalog.pg_attrdef d
               WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum AND a.atthasdef)
              AS DEFAULT,
              a.attnotnull, a.attnum, a.attrelid as table_oid
            FROM pg_catalog.pg_attribute a
            WHERE a.attrelid = (
                SELECT c.oid
                FROM pg_catalog.pg_class c
                     LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
                     WHERE (%s)
                     AND c.relname = :table_name AND c.relkind in ('r','v')
            ) AND a.attnum > 0 AND NOT a.attisdropped
            ORDER BY a.attnum
        """ % schema_where_clause

        s = sql.text(SQL_COLS,
                     bindparams=[
                         sql.bindparam('table_name', type_=sqltypes.Unicode),
                         sql.bindparam('schema', type_=sqltypes.Unicode)
                     ],
                     typemap={
                         'attname': sqltypes.Unicode,
                         'default': sqltypes.Unicode
                     })
        tablename = table.name
        if isinstance(tablename, str):
            tablename = tablename.decode(self.encoding)
        c = connection.execute(s, table_name=tablename, schema=schemaname)
        rows = c.fetchall()

        if not rows:
            raise exc.NoSuchTableError(table.name)

        domains = self._load_domains(connection)

        for name, format_type, default, notnull, attnum, table_oid in rows:
            if include_columns and name not in include_columns:
                continue

            # strip (30) from character varying(30)
            attype = re.search(r'([^\([]+)', format_type).group(1)
            nullable = not notnull
            is_array = format_type.endswith('[]')

            try:
                charlen = re.search(r'\(([\d,]+)\)', format_type).group(1)
            except AttributeError:
                charlen = False

            numericprec = False
            numericscale = False
            if attype == 'numeric':
                if charlen is False:
                    numericprec, numericscale = (None, None)
                else:
                    numericprec, numericscale = charlen.split(',')
                charlen = False
            elif attype == 'double precision':
                numericprec, numericscale = (True, False)
                charlen = False
            elif attype == 'integer':
                numericprec, numericscale = (32, 0)
                charlen = False

            args = []
            for a in (charlen, numericprec, numericscale):
                if a is None:
                    args.append(None)
                elif a is not False:
                    args.append(int(a))

            kwargs = {}
            if attype == 'timestamp with time zone':
                kwargs['timezone'] = True
            elif attype == 'timestamp without time zone':
                kwargs['timezone'] = False

            coltype = None
            if attype in ischema_names:
                coltype = ischema_names[attype]
            else:
                if attype in domains:
                    domain = domains[attype]
                    if domain['attype'] in ischema_names:
                        # A table can't override whether the domain is nullable.
                        nullable = domain['nullable']

                        if domain['default'] and not default:
                            # It can, however, override the default value, but can't set it to null.
                            default = domain['default']
                        coltype = ischema_names[domain['attype']]

            if coltype:
                coltype = coltype(*args, **kwargs)
                if is_array:
                    coltype = PGArray(coltype)
            else:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (attype, name))
                coltype = sqltypes.NULLTYPE

            colargs = []
            if default is not None:
                match = re.search(r"""(nextval\(')([^']+)('.*$)""", default)
                if match is not None:
                    # the default is related to a Sequence
                    sch = table.schema
                    if '.' not in match.group(2) and sch is not None:
                        # unconditionally quote the schema name.  this could
                        # later be enhanced to obey quoting rules / "quote schema"
                        default = match.group(1) + (
                            '"%s"' %
                            sch) + '.' + match.group(2) + match.group(3)
                colargs.append(schema.DefaultClause(sql.text(default)))
            table.append_column(
                schema.Column(name, coltype, nullable=nullable, *colargs))

        # Primary keys
        PK_SQL = """
          SELECT attname FROM pg_attribute
          WHERE attrelid = (
             SELECT indexrelid FROM pg_index i
             WHERE i.indrelid = :table
             AND i.indisprimary = 't')
          ORDER BY attnum
        """
        t = sql.text(PK_SQL, typemap={'attname': sqltypes.Unicode})
        c = connection.execute(t, table=table_oid)
        for row in c.fetchall():
            pk = row[0]
            if pk in table.c:
                col = table.c[pk]
                table.primary_key.add(col)
                if col.default is None:
                    col.autoincrement = False

        # Foreign keys
        FK_SQL = """
          SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef
          FROM  pg_catalog.pg_constraint r
          WHERE r.conrelid = :table AND r.contype = 'f'
          ORDER BY 1
        """

        t = sql.text(FK_SQL,
                     typemap={
                         'conname': sqltypes.Unicode,
                         'condef': sqltypes.Unicode
                     })
        c = connection.execute(t, table=table_oid)
        for conname, condef in c.fetchall():
            m = re.search(
                r'FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)',
                condef).groups()
            (constrained_columns, referred_schema, referred_table,
             referred_columns) = m
            constrained_columns = [
                preparer._unquote_identifier(x)
                for x in re.split(r'\s*,\s*', constrained_columns)
            ]
            if referred_schema:
                referred_schema = preparer._unquote_identifier(referred_schema)
            elif table.schema is not None and table.schema == self.get_default_schema_name(
                    connection):
                # no schema (i.e. it's the default schema), and the table we're
                # reflecting has the default schema explicit, then use that.
                # i.e. try to use the user's conventions
                referred_schema = table.schema
            referred_table = preparer._unquote_identifier(referred_table)
            referred_columns = [
                preparer._unquote_identifier(x)
                for x in re.split(r'\s*,\s*', referred_columns)
            ]

            refspec = []
            if referred_schema is not None:
                schema.Table(referred_table,
                             table.metadata,
                             autoload=True,
                             schema=referred_schema,
                             autoload_with=connection)
                for column in referred_columns:
                    refspec.append(".".join(
                        [referred_schema, referred_table, column]))
            else:
                schema.Table(referred_table,
                             table.metadata,
                             autoload=True,
                             autoload_with=connection)
                for column in referred_columns:
                    refspec.append(".".join([referred_table, column]))

            table.append_constraint(
                schema.ForeignKeyConstraint(constrained_columns,
                                            refspec,
                                            conname,
                                            link_to_name=True))

        # Indexes
        IDX_SQL = """
          SELECT c.relname, i.indisunique, i.indexprs, i.indpred,
            a.attname
          FROM pg_index i, pg_class c, pg_attribute a
          WHERE i.indrelid = :table AND i.indexrelid = c.oid
            AND a.attrelid = i.indexrelid AND i.indisprimary = 'f'
          ORDER BY c.relname, a.attnum
        """
        t = sql.text(IDX_SQL, typemap={'attname': sqltypes.Unicode})
        c = connection.execute(t, table=table_oid)
        indexes = {}
        sv_idx_name = None
        for row in c.fetchall():
            idx_name, unique, expr, prd, col = row

            if expr:
                if idx_name != sv_idx_name:
                    util.warn(
                        "Skipped unsupported reflection of expression-based index %s"
                        % idx_name)
                sv_idx_name = idx_name
                continue
            if prd and idx_name != sv_idx_name:
                util.warn(
                    "Predicate of partial index %s ignored during reflection" %
                    idx_name)
                sv_idx_name = idx_name

            if idx_name not in indexes:
                indexes[idx_name] = [unique, []]
            indexes[idx_name][1].append(col)

        for name, (unique, columns) in indexes.items():
            schema.Index(name, *[table.columns[c] for c in columns],
                         unique=unique)
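For reference, current SQLAlchemy surfaces the same index information through the public Inspector API rather than a dialect-level reflecttable(). A short sketch (connection URL and table name are assumptions):

from sqlalchemy import create_engine, inspect

engine = create_engine('postgresql://user:pass@localhost/db')  # assumed DSN
insp = inspect(engine)
for idx in insp.get_indexes('some_table'):  # assumed table name
    print(idx['name'], idx['column_names'], idx['unique'])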
Example #5
    def reflecttable(self, table, include_columns, exclude_columns=()):
        """Given a Table object, load its internal constructs based on introspection.

        This is the underlying method used by most dialects to produce 
        table reflection.  Direct usage is like::

            from sqlalchemy import create_engine, MetaData, Table
            from sqlalchemy.engine import reflection

            engine = create_engine('...')
            meta = MetaData()
            user_table = Table('user', meta)
            insp = Inspector.from_engine(engine)
            insp.reflecttable(user_table, None)

        :param table: a :class:`~sqlalchemy.schema.Table` instance.
        :param include_columns: a list of string column names to include
          in the reflection process.  If ``None``, all columns are reflected.
        :param exclude_columns: a list of string column names to exclude
          from the reflection process.

        """
        dialect = self.bind.dialect

        # table attributes we might need.
        reflection_options = dict((k, table.kwargs.get(k))
                                  for k in dialect.reflection_options
                                  if k in table.kwargs)

        schema = table.schema
        table_name = table.name

        # apply table options
        tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
        if tbl_opts:
            table.kwargs.update(tbl_opts)

        # table.kwargs will need to be passed to each reflection method.  Make
        # sure keywords are strings.
        tblkw = table.kwargs.copy()
        for (k, v) in list(tblkw.items()):
            del tblkw[k]
            tblkw[str(k)] = v

        # Py2K
        if isinstance(schema, str):
            schema = schema.decode(dialect.encoding)
        if isinstance(table_name, str):
            table_name = table_name.decode(dialect.encoding)
        # end Py2K

        # columns
        found_table = False
        for col_d in self.get_columns(table_name, schema, **tblkw):
            found_table = True
            table.dispatch.column_reflect(table, col_d)

            name = col_d['name']
            if include_columns and name not in include_columns:
                continue
            if exclude_columns and name in exclude_columns:
                continue

            coltype = col_d['type']
            col_kw = {
                'nullable': col_d['nullable'],
            }
            for k in ('autoincrement', 'quote', 'info', 'key'):
                if k in col_d:
                    col_kw[k] = col_d[k]

            colargs = []
            if col_d.get('default') is not None:
                # the "default" value is assumed to be a literal SQL expression,
                # so is wrapped in text() so that no quoting occurs on re-issuance.
                colargs.append(
                    sa_schema.DefaultClause(sql.text(col_d['default']),
                                            _reflected=True))

            if 'sequence' in col_d:
                # TODO: mssql, maxdb and sybase are using this.
                seq = col_d['sequence']
                sequence = sa_schema.Sequence(seq['name'], 1, 1)
                if 'start' in seq:
                    sequence.start = seq['start']
                if 'increment' in seq:
                    sequence.increment = seq['increment']
                colargs.append(sequence)

            col = sa_schema.Column(name, coltype, *colargs, **col_kw)
            table.append_column(col)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        # Primary keys
        pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
        if pk_cons:
            pk_cols = [
                table.c[pk] for pk in pk_cons['constrained_columns']
                if pk in table.c and pk not in exclude_columns
            ] + [pk for pk in table.primary_key if pk.key in exclude_columns]
            primary_key_constraint = sa_schema.PrimaryKeyConstraint(
                name=pk_cons.get('name'), *pk_cols)

            table.append_constraint(primary_key_constraint)

        # Foreign keys
        fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
        for fkey_d in fkeys:
            conname = fkey_d['name']
            constrained_columns = fkey_d['constrained_columns']
            referred_schema = fkey_d['referred_schema']
            referred_table = fkey_d['referred_table']
            referred_columns = fkey_d['referred_columns']
            refspec = []
            if referred_schema is not None:
                sa_schema.Table(referred_table,
                                table.metadata,
                                autoload=True,
                                schema=referred_schema,
                                autoload_with=self.bind,
                                **reflection_options)
                for column in referred_columns:
                    refspec.append(".".join(
                        [referred_schema, referred_table, column]))
            else:
                sa_schema.Table(referred_table,
                                table.metadata,
                                autoload=True,
                                autoload_with=self.bind,
                                **reflection_options)
                for column in referred_columns:
                    refspec.append(".".join([referred_table, column]))
            table.append_constraint(
                sa_schema.ForeignKeyConstraint(constrained_columns,
                                               refspec,
                                               conname,
                                               link_to_name=True))
        # Indexes
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'unknown type')
            if include_columns and \
                    not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s KEY for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            sa_schema.Index(name, *[table.columns[c] for c in columns],
                            unique=unique)
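In application code this method is usually driven indirectly. Under SQLAlchemy 1.4 and later, a plain reflection call that funnels through this machinery looks roughly like:

from sqlalchemy import create_engine, MetaData, Table

engine = create_engine('sqlite:///example.db')  # assumed database file
meta = MetaData()
# columns, primary key, foreign keys and indexes are filled in by the
# reflection machinery shown above
user_table = Table('user', meta, autoload_with=engine)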
Example #6
def Index(*args, **kws):
    index = schema.Index(*args, **kws)
    db.metadata.__dict__.setdefault("index_list", []).append(index)
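A rough usage sketch, assuming db is a Flask-SQLAlchemy-style object exposing db.metadata and db.engine, and User.email is a hypothetical mapped column:

# register the index at model-definition time
Index('ix_user_email', User.email, unique=True)

# later, create any collected indexes that are still missing
for ix in db.metadata.__dict__.get('index_list', []):
    ix.create(db.engine, checkfirst=True)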
Example #7
 def _index(self, name, tablename, columns, schema=None, **kw):
     t = sa_schema.Table(tablename or 'no_table',
                         sa_schema.MetaData(),
                         *[sa_schema.Column(n, NULLTYPE) for n in columns],
                         schema=schema)
     return sa_schema.Index(name, *list(t.c), **kw)
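A sketch of invoking the helper from its owning object (names are illustrative; CreateIndex simply renders the DDL the returned Index would emit):

from sqlalchemy.schema import CreateIndex

ix = self._index('ix_account_name', 'account', ['name'], unique=True)
print(CreateIndex(ix))
# CREATE UNIQUE INDEX ix_account_name ON account (name)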
Example #8
    def createDB(self):
        """
        Step through and create the DB structure, relationships and constraints
        **Note that order matters here: you have to define a Table before you can link to it.**

        TODO this can/should all be redone using the new syntax and relations
        see: http://docs.sqlalchemy.org/en/rel_0_7/orm/relationships.html# for
        some examples.

        NOTE: if one stops using sqlite then change file_id, logging_id and file_logging_id
              to BigIntegers (sqlite doesn't know BigInteger)
        """

        self.init_db(self.user, self.password, self.db_name)
        metadata = self.metadata

        data_table = schema.Table('mission',
                                  metadata,
                                  schema.Column('mission_id',
                                                types.Integer,
                                                autoincrement=True,
                                                primary_key=True,
                                                nullable=False),
                                  schema.Column('mission_name',
                                                types.String(20),
                                                nullable=False,
                                                unique=True),
                                  schema.Column(
                                      'rootdir',
                                      types.String(150),
                                      nullable=False,
                                  ),
                                  schema.Column(
                                      'incoming_dir',
                                      types.String(150),
                                      nullable=False,
                                  ),
                                  schema.Column(
                                      'codedir',
                                      types.String(150),
                                      nullable=True,
                                  ),
                                  schema.Column(
                                      'inspectordir',
                                      types.String(150),
                                      nullable=True,
                                  ),
                                  schema.Column(
                                      'errordir',
                                      types.String(150),
                                      nullable=True,
                                  ),
                                  extend_existing=True)

        data_table = schema.Table(
            'satellite',
            metadata,
            schema.Column('satellite_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False),
            schema.Column('satellite_name', types.String(20),
                          nullable=False),  # hmm long enough?
            schema.Column(
                'mission_id',
                types.Integer,
                schema.ForeignKey('mission.mission_id'),
                nullable=False,
            ),
            schema.UniqueConstraint('satellite_name',
                                    'mission_id',
                                    name='unique_pairs_satellite'),
            extend_existing=True)

        data_table = schema.Table(
            'instrument',
            metadata,
            schema.Column('instrument_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False),
            schema.Column('instrument_name', types.String(20), nullable=False),
            # hmm long enough?
            schema.Column(
                'satellite_id',
                types.Integer,
                schema.ForeignKey('satellite.satellite_id'),
                nullable=False,
            ),
            schema.UniqueConstraint('instrument_name',
                                    'satellite_id',
                                    name='unique_pairs_instrument'),
            extend_existing=True)

        data_table = schema.Table(
            'product',
            metadata,
            schema.Column('product_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False,
                          index=True),
            schema.Column('product_name',
                          types.String(100),
                          nullable=False,
                          index=True),
            # hmm long enough?
            schema.Column(
                'instrument_id',
                types.Integer,
                schema.ForeignKey('instrument.instrument_id'),
                nullable=False,
            ),
            schema.Column('relative_path', types.String(100),
                          nullable=False),  # hmm long enough?
            schema.Column('level', types.Float, nullable=False),
            schema.Column('format', types.Text,
                          nullable=False),  # hmm long enough?
            schema.Column('product_description', types.Text,
                          nullable=True),  # hmm long enough?
            schema.UniqueConstraint('product_name',
                                    'instrument_id',
                                    'relative_path',
                                    name='unique_triplet_product'),
            extend_existing=True)

        data_table = schema.Table(
            'instrumentproductlink',
            metadata,
            schema.Column('instrument_id',
                          types.Integer,
                          schema.ForeignKey('instrument.instrument_id'),
                          nullable=False),
            schema.Column('product_id',
                          types.Integer,
                          schema.ForeignKey('product.product_id'),
                          nullable=False),
            schema.PrimaryKeyConstraint('instrument_id', 'product_id'),
            extend_existing=True)

        data_table = schema.Table(
            'process',
            metadata,
            schema.Column('process_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False,
                          index=True),
            schema.Column('process_name', types.String(50),
                          nullable=False),  # hmm long enough?
            schema.Column('output_product',
                          types.Integer,
                          schema.ForeignKey('product.product_id'),
                          nullable=True,
                          index=True),
            schema.Column('output_timebase',
                          types.String(10),
                          nullable=True,
                          index=True),
            schema.Column('extra_params', types.Text, nullable=True),
            schema.UniqueConstraint('process_name', 'output_product'),
            extend_existing=True)

        data_table = schema.Table(
            'productprocesslink',
            metadata,
            schema.Column('process_id',
                          types.Integer,
                          schema.ForeignKey('process.process_id'),
                          nullable=False),
            schema.Column('input_product_id',
                          types.Integer,
                          schema.ForeignKey('product.product_id'),
                          nullable=False),
            schema.Column('optional', types.Boolean, nullable=False),
            #                                  schema.Column('yesterday', types.Integer, nullable=False),
            #                                  schema.Column('tomorrow', types.Integer, nullable=False),
            schema.PrimaryKeyConstraint('process_id', 'input_product_id'),
            extend_existing=True)

        data_table = schema.Table(
            'file',
            metadata,
            # this was a bigint, sqlalchemy doesn't seem to like this... think here
            schema.Column('file_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False,
                          index=True),
            schema.Column('filename',
                          types.String(250),
                          nullable=False,
                          unique=True,
                          index=True),
            schema.Column('utc_file_date',
                          types.Date,
                          nullable=True,
                          index=True),
            schema.Column('utc_start_time',
                          types.DateTime,
                          nullable=True,
                          index=True),
            schema.Column('utc_stop_time',
                          types.DateTime,
                          nullable=True,
                          index=True),
            schema.Column('data_level',
                          types.Float,
                          nullable=False,
                          index=True),
            schema.Column('interface_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('quality_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('revision_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('verbose_provenance', types.Text, nullable=True),
            schema.Column('check_date', types.DateTime, nullable=True),
            schema.Column('quality_comment', types.Text, nullable=True),
            schema.Column('caveats', types.Text, nullable=True),
            schema.Column('file_create_date', types.DateTime, nullable=False),
            schema.Column('met_start_time', types.Float, nullable=True),
            schema.Column('met_stop_time', types.Float, nullable=True),
            schema.Column('exists_on_disk', types.Boolean, nullable=False),
            schema.Column('quality_checked',
                          types.Boolean,
                          nullable=True,
                          default=False),
            schema.Column('product_id',
                          types.Integer,
                          schema.ForeignKey('product.product_id'),
                          nullable=False),
            schema.Column('shasum', types.String(40), nullable=True),
            schema.Column('process_keywords', types.Text, nullable=True),
            schema.CheckConstraint(
                'utc_stop_time is not NULL OR met_stop_time is not NULL'),
            schema.CheckConstraint(
                'utc_start_time is not NULL OR met_start_time is not NULL'),
            schema.CheckConstraint(
                'met_start_time <= met_stop_time'),  # in case of one entry
            schema.CheckConstraint(
                'utc_start_time <= utc_stop_time'),  # in case of one entry
            schema.CheckConstraint('interface_version >= 1'),
            schema.UniqueConstraint('utc_file_date',
                                    'product_id',
                                    'interface_version',
                                    'quality_comment',
                                    'revision_version',
                                    name='Unique file tuple'),
            extend_existing=True)
        schema.Index('ix_file_big',
                     data_table.columns['filename'],
                     data_table.columns['utc_file_date'],
                     data_table.columns['utc_start_time'],
                     data_table.columns['utc_stop_time'],
                     unique=True)

        data_table = schema.Table(
            'unixtime',
            metadata,
            schema.Column('file_id',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          primary_key=True,
                          index=True),
            schema.Column('unix_start', types.Integer, index=True),
            schema.Column('unix_stop', types.Integer, index=True),
            schema.CheckConstraint('unix_start <= unix_stop'),
        )

        data_table = schema.Table(
            'filefilelink',
            metadata,
            schema.Column('source_file',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          nullable=False,
                          index=True),
            schema.Column('resulting_file',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          nullable=False,
                          index=True),
            schema.PrimaryKeyConstraint('source_file', 'resulting_file'),
            schema.CheckConstraint('source_file <> resulting_file'),
            # TODO this is supposed to be more general than !=
            extend_existing=True)

        data_table = schema.Table(
            'code',
            metadata,
            schema.Column('code_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False,
                          index=True),
            schema.Column('filename',
                          types.String(250),
                          nullable=False,
                          unique=False),
            schema.Column('relative_path', types.String(100), nullable=False),
            schema.Column('code_start_date', types.Date, nullable=False),
            schema.Column('code_stop_date', types.Date, nullable=False),
            schema.Column('code_description', types.Text, nullable=False),
            schema.Column('process_id',
                          types.Integer,
                          schema.ForeignKey('process.process_id'),
                          nullable=False,
                          index=True),
            schema.Column('interface_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('quality_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('revision_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('output_interface_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('active_code',
                          types.Boolean,
                          nullable=False,
                          default=False),
            schema.Column('date_written', types.Date, nullable=False),
            schema.Column('shasum', types.String(40), nullable=True),
            schema.Column('newest_version', types.Boolean, nullable=False),
            schema.Column('arguments', types.Text, nullable=True),
            schema.Column('ram', types.Float,
                          nullable=True),  # amount of RAM used, in GB
            schema.Column('cpu', types.SmallInteger,
                          nullable=True),  # number of CPUs used
            schema.CheckConstraint('code_start_date <= code_stop_date'),
            schema.CheckConstraint('interface_version >= 1'),
            schema.CheckConstraint('output_interface_version >= 1'),
            extend_existing=True)

        data_table = schema.Table(
            'processqueue',
            metadata,
            schema.Column('file_id',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          primary_key=True,
                          nullable=False,
                          unique=True,
                          index=True),
            schema.Column('version_bump', types.SmallInteger, nullable=True),
            schema.Column('instrument_id',
                          types.Integer,
                          schema.ForeignKey('instrument.instrument_id'),
                          nullable=False),
            schema.CheckConstraint('version_bump is NULL or version_bump < 3'),
            extend_existing=True)

        data_table = schema.Table(
            'filecodelink',
            metadata,
            schema.Column('resulting_file',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          nullable=False),
            schema.Column('source_code',
                          types.Integer,
                          schema.ForeignKey('code.code_id'),
                          nullable=False),
            schema.PrimaryKeyConstraint('resulting_file', 'source_code'),
            extend_existing=True)

        data_table = schema.Table('release',
                                  metadata,
                                  schema.Column(
                                      'file_id',
                                      types.Integer,
                                      schema.ForeignKey('file.file_id'),
                                      nullable=False,
                                  ),
                                  schema.Column('release_num',
                                                types.String(20),
                                                nullable=False),
                                  schema.PrimaryKeyConstraint(
                                      'file_id', 'release_num'),
                                  extend_existing=True)

        data_table = schema.Table(
            'processpidlink', metadata,
            schema.Column('ppl_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False),
            schema.Column('pid', types.Integer, nullable=True),
            schema.Column('hostname', types.String(100), nullable=True),
            schema.Column('process_id',
                          types.Integer,
                          schema.ForeignKey('process.process_id'),
                          nullable=True),
            schema.Column('currentlyprocessing',
                          types.Boolean,
                          nullable=True,
                          default='f'),
            schema.Column('start_time',
                          types.DateTime,
                          nullable=True,
                          default=func.now()),
            schema.Column('end_time',
                          types.DateTime,
                          nullable=True,
                          default=func.now()))

        data_table = schema.Table(
            'logging',
            metadata,
            schema.Column('logging_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False),
            schema.Column('currently_processing',
                          types.Boolean,
                          nullable=False,
                          default=False),
            schema.Column('pid', types.Integer, nullable=True),
            schema.Column('processing_start_time',
                          types.DateTime,
                          nullable=False),
            # might have to be a TIMESTAMP
            schema.Column('processing_end_time', types.DateTime,
                          nullable=True),
            schema.Column('comment', types.Text, nullable=True),
            schema.Column('mission_id',
                          types.Integer,
                          schema.ForeignKey('mission.mission_id'),
                          nullable=False),
            schema.Column('user', types.String(30), nullable=False),
            schema.Column('hostname', types.String(100), nullable=False),
            # schema.PrimaryKeyConstraint('logging_id'),
            schema.CheckConstraint(
                'processing_start_time < processing_end_time'),
            extend_existing=True)

        data_table = schema.Table(
            'logging_file',
            metadata,
            schema.Column('logging_file_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False),
            schema.Column('logging_id',
                          types.Integer,
                          schema.ForeignKey('logging.logging_id'),
                          nullable=False),
            schema.Column('file_id',
                          types.Integer,
                          schema.ForeignKey('file.file_id'),
                          nullable=False),
            schema.Column('code_id',
                          types.Integer,
                          schema.ForeignKey('code.code_id'),
                          nullable=False),
            schema.Column('comments', types.Text, nullable=True),
            # schema.PrimaryKeyConstraint('logging_file_id'),
            extend_existing=True)

        data_table = schema.Table(
            'inspector',
            metadata,
            schema.Column('inspector_id',
                          types.Integer,
                          autoincrement=True,
                          primary_key=True,
                          nullable=False,
                          index=True),
            schema.Column('filename',
                          types.String(250),
                          nullable=False,
                          unique=False),
            schema.Column('relative_path', types.String(250), nullable=False),
            schema.Column('description', types.Text, nullable=False),
            schema.Column('interface_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('quality_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('revision_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('output_interface_version',
                          types.SmallInteger,
                          nullable=False),
            schema.Column('active_code',
                          types.Boolean,
                          nullable=False,
                          default=False,
                          index=True),
            schema.Column('date_written', types.Date, nullable=False),
            schema.Column('shasum', types.String(40), nullable=True),
            schema.Column('newest_version',
                          types.Boolean,
                          nullable=False,
                          index=True),
            schema.Column('arguments', types.Text, nullable=True),
            schema.Column('product',
                          types.Integer,
                          schema.ForeignKey('product.product_id'),
                          nullable=False),
            schema.CheckConstraint('interface_version >= 1'),
            schema.CheckConstraint('output_interface_version >= 1'),
            extend_existing=True)

        # TODO move this out so that the user chooses the db type
        # engine = create_engine('postgres:///' + self.filename, echo=False)
        # metadata.bind = engine

        metadata.create_all(checkfirst=True)
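The standalone ix_file_big index above is a reusable pattern. A minimal self-contained sketch with a trimmed-down table:

from sqlalchemy import create_engine, schema, types

metadata = schema.MetaData()
file_table = schema.Table(
    'file', metadata,
    schema.Column('file_id', types.Integer, primary_key=True),
    schema.Column('filename', types.String(250)),
    schema.Column('utc_file_date', types.Date))
schema.Index('ix_file_small',
             file_table.columns['filename'],
             file_table.columns['utc_file_date'],
             unique=True)

engine = create_engine('sqlite://')
metadata.create_all(engine, checkfirst=True)  # table plus the unique index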
Example #9
 def _index(self, name, tablename, columns, schema=None, **kw):
     t = sa_schema.Table(tablename or 'no_table',
                         self._metadata(),
                         *[sa_schema.Column(n, NULLTYPE) for n in columns],
                         schema=schema)
     return sa_schema.Index(name, *[t.c[n] for n in columns], **kw)
Example #10
    def reflecttable(self, table, include_columns):

        dialect = self.conn.dialect

        # MySQL dialect does this.  Applicable with other dialects?
        if hasattr(dialect, '_connection_charset') \
                                        and hasattr(dialect, '_adjust_casing'):
            charset = dialect._connection_charset
            dialect._adjust_casing(table)

        # table attributes we might need.
        reflection_options = dict(
            (k, table.kwargs.get(k)) for k in dialect.reflection_options if k in table.kwargs)

        schema = table.schema
        table_name = table.name

        # apply table options
        tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
        if tbl_opts:
            table.kwargs.update(tbl_opts)

        # table.kwargs will need to be passed to each reflection method.  Make
        # sure keywords are strings.
        tblkw = table.kwargs.copy()
        for (k, v) in list(tblkw.items()):
            del tblkw[k]
            tblkw[str(k)] = v

        # Py2K
        if isinstance(schema, str):
            schema = schema.decode(dialect.encoding)
        if isinstance(table_name, str):
            table_name = table_name.decode(dialect.encoding)
        # end Py2K

        # columns
        found_table = False
        for col_d in self.get_columns(table_name, schema, **tblkw):
            found_table = True
            name = col_d['name']
            if include_columns and name not in include_columns:
                continue

            coltype = col_d['type']
            col_kw = {
                'nullable': col_d['nullable'],
            }
            if 'autoincrement' in col_d:
                col_kw['autoincrement'] = col_d['autoincrement']
            if 'quote' in col_d:
                col_kw['quote'] = col_d['quote']
                
            colargs = []
            if col_d.get('default') is not None:
                # the "default" value is assumed to be a literal SQL expression,
                # so is wrapped in text() so that no quoting occurs on re-issuance.
                colargs.append(sa_schema.DefaultClause(sql.text(col_d['default'])))
                
            if 'sequence' in col_d:
                # TODO: mssql, maxdb and sybase are using this.
                seq = col_d['sequence']
                sequence = sa_schema.Sequence(seq['name'], 1, 1)
                if 'start' in seq:
                    sequence.start = seq['start']
                if 'increment' in seq:
                    sequence.increment = seq['increment']
                colargs.append(sequence)
                
            col = sa_schema.Column(name, coltype, *colargs, **col_kw)
            table.append_column(col)

        if not found_table:
            raise exc.NoSuchTableError(table.name)

        # Primary keys
        pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
        if pk_cons:
            primary_key_constraint = sa_schema.PrimaryKeyConstraint(name=pk_cons.get('name'), 
                *[table.c[pk] for pk in pk_cons['constrained_columns']
                if pk in table.c]
            )

            table.append_constraint(primary_key_constraint)

        # Foreign keys
        fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
        for fkey_d in fkeys:
            conname = fkey_d['name']
            constrained_columns = fkey_d['constrained_columns']
            referred_schema = fkey_d['referred_schema']
            referred_table = fkey_d['referred_table']
            referred_columns = fkey_d['referred_columns']
            refspec = []
            if referred_schema is not None:
                sa_schema.Table(referred_table, table.metadata,
                                autoload=True, schema=referred_schema,
                                autoload_with=self.conn,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join(
                        [referred_schema, referred_table, column]))
            else:
                sa_schema.Table(referred_table, table.metadata, autoload=True,
                                autoload_with=self.conn,
                                **reflection_options
                                )
                for column in referred_columns:
                    refspec.append(".".join([referred_table, column]))
            table.append_constraint(
                sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
                                               conname, link_to_name=True))
        # Indexes 
        indexes = self.get_indexes(table_name, schema)
        for index_d in indexes:
            name = index_d['name']
            columns = index_d['column_names']
            unique = index_d['unique']
            flavor = index_d.get('type', 'unknown type')
            if include_columns and \
                    not set(columns).issubset(include_columns):
                util.warn(
                    "Omitting %s KEY for (%s), key covers omitted columns." %
                    (flavor, ', '.join(columns)))
                continue
            sa_schema.Index(name, *[table.columns[c] for c in columns],
                            unique=unique)