# Example #1
    def do_mining(self):
        """Do the parallelized mining.

        Spawns ``workers_per_model`` worker processes per entity-extraction
        model, feeds them through one task queue per model, supervises the
        queues/workers until everything is drained and all workers exited,
        and finally creates an index on the cache table to speed up the
        downstream ``ORDER BY``.
        """
        self.logger.info(
            f"Starting mining with {self.workers_per_model} workers per model."
        )

        # Flags to let the workers know there won't be any new tasks.
        can_finish: Dict[str, mp.synchronize.Event] = {
            etype: mp.Event()
            for etype in self.ee_models_paths
        }

        # Prepare the task queues for the workers - one task queue per model.
        task_queues: Dict[str, mp.Queue] = {
            etype: mp.Queue()
            for etype in self.ee_models_paths
        }

        # Spawn the workers according to `workers_per_model`.
        self.logger.info("Spawning the worker processes")
        worker_processes = []
        workers_by_queue: Dict[str, List[mp.Process]] = {
            queue_name: []
            for queue_name in task_queues
        }
        for etype, model_path in self.ee_models_paths.items():
            for i in range(self.workers_per_model):
                worker_name = f"{etype}_{i}"
                worker_process = mp.Process(
                    name=worker_name,
                    target=Miner.create_and_mine,
                    kwargs={
                        "database_url": self.engine.url,
                        "model_path": model_path,
                        "target_table": self.target_table,
                        "task_queue": task_queues[etype],
                        "can_finish": can_finish[etype],
                    },
                )
                worker_process.start()
                worker_processes.append(worker_process)
                workers_by_queue[etype].append(worker_process)

        # Create tasks
        self.logger.info("Creating tasks")
        self.create_tasks(task_queues, workers_by_queue)

        # Monitor the queues and the workers to decide when we're finished.
        # For a given model the work is finished when the corresponding queue
        # is empty. But it can be that all workers stop/crash before all
        # tasks are done. Therefore we need to check if anyone is still
        # working on a given queue, and if not empty we will empty it.
        while not all(flag.is_set() for flag in can_finish.values()):
            for queue_name, task_queue in task_queues.items():
                if can_finish[queue_name].is_set():
                    # This queue is already empty we've let the workers know
                    continue
                if not any(worker.is_alive()
                           for worker in workers_by_queue[queue_name]):
                    # All workers for this queue are gone; drain the
                    # leftovers so the queue can be closed safely below.
                    self.logger.debug(f"Emptying the {queue_name} queue")
                    while not task_queue.empty():
                        article_id = task_queue.get(timeout=1)
                        self.logger.debug(f"Got non-done task {article_id}")
                if task_queue.empty():
                    self.logger.debug(
                        f"Setting the can finish flag for the {queue_name} queue."
                    )
                    can_finish[queue_name].set()

        self.logger.info("Closing all task queues")
        for queue_name, task_queue in task_queues.items():
            self.logger.debug(
                f"Closing the reading end of the queue {queue_name}")
            # Note that this is only safe when the queue is empty. This is
            # because there's a background thread putting buffered data
            # in the queue. If the queue is not empty it might be that the
            # background thread is still transferring the data from the
            # buffer. Closing the reading end of the internal pipe actually
            # also closes the writing end, and therefore the background
            # thread will throw a BrokenPipeError as it will fail to write
            # to the closed pipe.
            task_queue.close()
            self.logger.debug(
                f"Joining the buffering thread of queue {queue_name}")
            task_queue.join_thread()

        # Wait for the processes to finish.
        self.logger.info(
            "No more new tasks, just waiting for the workers to finish")
        # We'll transfer finished workers from `worker_processes`
        # to `finished_workers`. We're done when `worker_processes` is empty.
        finished_workers: List[mp.Process] = []
        while len(worker_processes) > 0:
            self.logger.debug(
                f"Status: {len(worker_processes)} workers still alive, "
                f"{len(finished_workers)} finished.")
            # Loop through all living workers and try to join
            for process in worker_processes:
                # Don't need to wait forever - others might finish before
                process.join(timeout=1.0)
                # If the current process did finish then put it in the
                # `finished_workers` queue and do some cleaning up.
                if not process.is_alive():
                    self.logger.info(f"Worker {process.name} finished.")
                    finished_workers.append(process)
                    if process.exitcode != 0:
                        self.logger.error(
                            f"Worker {process.name} terminated with exit "
                            f"code {process.exitcode}!")

            # Remove all workers that are already in the `finished_workers`
            # list from the `worker_processes` list.
            for process in finished_workers:
                if process in worker_processes:
                    worker_processes.remove(process)

        self.logger.info("Finished mining.")

        # Create index on (article_id, paragraph_pos_in_article, start_char)
        # to speed up ORDER BY clause.
        # BUG FIX: the log messages used to say "(par, art, char)" which
        # contradicted both the index name and its actual column order.
        self.logger.info("Start creating index on (art, par, char)...")
        sqlalchemy.Index(
            "index_art_par_char",
            self.mining_cache_table.c.article_id,
            self.mining_cache_table.c.paragraph_pos_in_article,
            self.mining_cache_table.c.start_char,
        ).create(bind=self.engine)
        self.logger.info("Done creating index on (art, par, char).")
# Example #2 — file: schema.py, project: dpays/hivemind
def build_metadata():
    """Build and return the full hive schema as a SQLAlchemy ``MetaData``.

    Tables are only *defined* here (attached to the returned ``metadata``
    object); nothing is created in the database by this function.
    """
    metadata = sa.MetaData()

    # Raw blockchain blocks; `prev` chains each block to its parent hash.
    sa.Table('hive_blocks',
             metadata,
             sa.Column('num',
                       sa.Integer,
                       primary_key=True,
                       autoincrement=False),
             sa.Column('hash', CHAR(40), nullable=False),
             sa.Column('prev', CHAR(40)),
             sa.Column('txs', SMALLINT, server_default='0', nullable=False),
             sa.Column('ops', SMALLINT, server_default='0', nullable=False),
             sa.Column('created_at', sa.DateTime, nullable=False),
             sa.UniqueConstraint('hash', name='hive_blocks_ux1'),
             sa.ForeignKeyConstraint(['prev'], ['hive_blocks.hash'],
                                     name='hive_blocks_fk1'),
             mysql_engine='InnoDB',
             mysql_default_charset='utf8mb4')

    # Account registry plus cached profile and statistics fields.
    sa.Table(
        'hive_accounts',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', VARCHAR(16), nullable=False),
        sa.Column('created_at', sa.DateTime, nullable=False),
        #sa.Column('block_num', sa.Integer, nullable=False),
        sa.Column('reputation',
                  sa.Float(precision=6),
                  nullable=False,
                  server_default='25'),
        sa.Column('display_name', sa.String(20)),
        sa.Column('about', sa.String(160)),
        sa.Column('location', sa.String(30)),
        sa.Column('website', sa.String(100)),
        sa.Column('profile_image',
                  sa.String(1024),
                  nullable=False,
                  server_default=''),
        sa.Column('cover_image',
                  sa.String(1024),
                  nullable=False,
                  server_default=''),
        sa.Column('followers', sa.Integer, nullable=False, server_default='0'),
        sa.Column('following', sa.Integer, nullable=False, server_default='0'),
        sa.Column('proxy', VARCHAR(16), nullable=False, server_default=''),
        sa.Column('post_count', sa.Integer, nullable=False,
                  server_default='0'),
        sa.Column('proxy_weight',
                  sa.Float(precision=6),
                  nullable=False,
                  server_default='0'),
        sa.Column('vote_weight',
                  sa.Float(precision=6),
                  nullable=False,
                  server_default='0'),
        sa.Column('kb_used', sa.Integer, nullable=False, server_default='0'),
        sa.Column('rank', sa.Integer, nullable=False, server_default='0'),
        sa.Column('active_at',
                  sa.DateTime,
                  nullable=False,
                  server_default='1970-01-01 00:00:00'),
        sa.Column('cached_at',
                  sa.DateTime,
                  nullable=False,
                  server_default='1970-01-01 00:00:00'),
        sa.Column('raw_json', sa.Text),
        sa.UniqueConstraint('name', name='hive_accounts_ux1'),
        sa.Index('hive_accounts_ix1', 'vote_weight',
                 'id'),  # core: quick ranks
        sa.Index('hive_accounts_ix2', 'name', 'id'),  # core: quick id map
        mysql_engine='InnoDB',
        mysql_default_charset='utf8mb4')

    # Posts/comments tree; `depth` and `parent_id` encode the reply nesting.
    sa.Table(
        'hive_posts',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('parent_id', sa.Integer),
        sa.Column('author', VARCHAR(16), nullable=False),
        sa.Column('permlink', VARCHAR(255), nullable=False),
        sa.Column('community', VARCHAR(16), nullable=False),
        sa.Column('category', VARCHAR(255), nullable=False, server_default=''),
        sa.Column('depth', SMALLINT, nullable=False),
        sa.Column('created_at', sa.DateTime, nullable=False),
        sa.Column('is_deleted', BOOLEAN, nullable=False, server_default='0'),
        sa.Column('is_pinned', BOOLEAN, nullable=False, server_default='0'),
        sa.Column('is_muted', BOOLEAN, nullable=False, server_default='0'),
        sa.Column('is_valid', BOOLEAN, nullable=False, server_default='1'),
        sa.Column('promoted',
                  sa.types.DECIMAL(10, 3),
                  nullable=False,
                  server_default='0'),
        sa.ForeignKeyConstraint(['author'], ['hive_accounts.name'],
                                name='hive_posts_fk1'),
        sa.ForeignKeyConstraint(['community'], ['hive_accounts.name'],
                                name='hive_posts_fk2'),
        sa.ForeignKeyConstraint(['parent_id'], ['hive_posts.id'],
                                name='hive_posts_fk3'),
        sa.UniqueConstraint('author', 'permlink', name='hive_posts_ux1'),
        sa.Index('hive_posts_ix1', 'parent_id'),  # API
        sa.Index('hive_posts_ix2', 'is_deleted', 'depth'),  # API
        mysql_engine='InnoDB',
        mysql_default_charset='utf8mb4')

    #sa.Table(
    #    'hive_tags', metadata,
    #    sa.Column('id', sa.Integer, primary_key=True),
    #    sa.Column('name', CHAR(64), nullable=False),
    #    sa.UniqueConstraint('name', name='hive_tags_ux1'),
    #    mysql_engine='InnoDB',
    #    mysql_default_charset='utf8mb4'
    #)

    # Many-to-many post<->tag association.
    sa.Table(
        'hive_post_tags',
        metadata,
        sa.Column('post_id', sa.Integer, nullable=False),
        sa.Column('tag', sa.String(32), nullable=False),
        sa.UniqueConstraint('tag', 'post_id',
                            name='hive_post_tags_ux1'),  # core
        sa.Index('hive_post_tags_ix1', 'post_id'),  # core
        mysql_engine='InnoDB',
        mysql_default_charset='utf8mb4')

    # Follow graph edges; `state` distinguishes follow/mute variants
    # (the partial indexes below only cover state = 1).
    sa.Table(
        'hive_follows',
        metadata,
        sa.Column('follower', sa.Integer, nullable=False),
        sa.Column('following', sa.Integer, nullable=False),
        sa.Column('state', SMALLINT, nullable=False, server_default='1'),
        sa.Column('created_at', sa.DateTime, nullable=False),
        sa.UniqueConstraint('following', 'follower',
                            name='hive_follows_ux3'),  # core
        sa.Index('hive_follows_ix2',
                 'following',
                 'follower',
                 postgresql_where=sql_text("state = 1")),  # API
        sa.Index('hive_follows_ix3',
                 'follower',
                 'following',
                 postgresql_where=sql_text("state = 1")),  # API
        mysql_engine='InnoDB',
        mysql_default_charset='utf8mb4')

    # Reblogs (resteems): which account re-shared which post, and when.
    sa.Table(
        'hive_reblogs',
        metadata,
        sa.Column('account', VARCHAR(16), nullable=False),
        sa.Column('post_id', sa.Integer, nullable=False),
        sa.Column('created_at', sa.DateTime, nullable=False),
        sa.ForeignKeyConstraint(['account'], ['hive_accounts.name'],
                                name='hive_reblogs_fk1'),
        sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'],
                                name='hive_reblogs_fk2'),
        sa.UniqueConstraint('account', 'post_id',
                            name='hive_reblogs_ux1'),  # core
        sa.Index('hive_reblogs_ix1', 'post_id', 'account',
                 'created_at'),  # API -- TODO: seemingly unused
        mysql_engine='InnoDB',
        mysql_default_charset='utf8mb4')

    # Token payments attached to posts (e.g. promotion payments).
    sa.Table('hive_payments',
             metadata,
             sa.Column('id', sa.Integer, primary_key=True),
             sa.Column('block_num', sa.Integer, nullable=False),
             sa.Column('tx_idx', SMALLINT, nullable=False),
             sa.Column('post_id', sa.Integer, nullable=False),
             sa.Column('from_account', sa.Integer, nullable=False),
             sa.Column('to_account', sa.Integer, nullable=False),
             sa.Column('amount', sa.types.DECIMAL(10, 3), nullable=False),
             sa.Column('token', VARCHAR(5), nullable=False),
             sa.ForeignKeyConstraint(['from_account'], ['hive_accounts.id'],
                                     name='hive_payments_fk1'),
             sa.ForeignKeyConstraint(['to_account'], ['hive_accounts.id'],
                                     name='hive_payments_fk2'),
             sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'],
                                     name='hive_payments_fk3'),
             mysql_engine='InnoDB',
             mysql_default_charset='utf8mb4')

    # Community registry; a community's name is also an account name.
    sa.Table('hive_communities',
             metadata,
             sa.Column('name', VARCHAR(16), primary_key=True),
             sa.Column('title', sa.String(32), nullable=False),
             sa.Column('about',
                       sa.String(255),
                       nullable=False,
                       server_default=''),
             sa.Column('description',
                       sa.String(5000),
                       nullable=False,
                       server_default=''),
             sa.Column('lang', CHAR(2), nullable=False, server_default='en'),
             sa.Column('settings', TEXT, nullable=False),
             sa.Column('type_id', SMALLINT, nullable=False,
                       server_default='0'),
             sa.Column('is_nsfw', BOOLEAN, nullable=False, server_default='0'),
             sa.Column('created_at', sa.DateTime, nullable=False),
             sa.ForeignKeyConstraint(['name'], ['hive_accounts.name'],
                                     name='hive_communities_fk1'),
             mysql_engine='InnoDB',
             mysql_default_charset='utf8mb4')

    # Community membership with per-member role/status flags.
    sa.Table('hive_members',
             metadata,
             sa.Column('community', VARCHAR(16), nullable=False),
             sa.Column('account', VARCHAR(16), nullable=False),
             sa.Column('is_admin', BOOLEAN, nullable=False),
             sa.Column('is_mod', BOOLEAN, nullable=False),
             sa.Column('is_approved', BOOLEAN, nullable=False),
             sa.Column('is_muted', BOOLEAN, nullable=False),
             sa.Column('title',
                       sa.String(255),
                       nullable=False,
                       server_default=''),
             sa.ForeignKeyConstraint(['community'], ['hive_communities.name'],
                                     name='hive_members_fk1'),
             sa.ForeignKeyConstraint(['account'], ['hive_accounts.name'],
                                     name='hive_members_fk2'),
             sa.UniqueConstraint('community',
                                 'account',
                                 name='hive_members_ux1'),
             mysql_engine='InnoDB',
             mysql_default_charset='utf8mb4')

    # Per-account post flags with free-text notes; one flag per post.
    sa.Table('hive_flags',
             metadata,
             sa.Column('account', VARCHAR(16), nullable=False),
             sa.Column('post_id', sa.Integer, nullable=False),
             sa.Column('created_at', sa.DateTime, nullable=False),
             sa.Column('notes', sa.String(255), nullable=False),
             sa.ForeignKeyConstraint(['account'], ['hive_accounts.name'],
                                     name='hive_flags_fk1'),
             sa.ForeignKeyConstraint(['post_id'], ['hive_posts.id'],
                                     name='hive_flags_fk2'),
             sa.UniqueConstraint('account', 'post_id', name='hive_flags_ux1'),
             mysql_engine='InnoDB',
             mysql_default_charset='utf8mb4')

    # Moderation action log per community.
    sa.Table('hive_modlog',
             metadata,
             sa.Column('id', sa.Integer, primary_key=True),
             sa.Column('community', VARCHAR(16), nullable=False),
             sa.Column('account', VARCHAR(16), nullable=False),
             sa.Column('action', sa.String(32), nullable=False),
             sa.Column('params', sa.String(1000), nullable=False),
             sa.Column('created_at', sa.DateTime, nullable=False),
             sa.ForeignKeyConstraint(['community'], ['hive_communities.name'],
                                     name='hive_modlog_fk1'),
             sa.ForeignKeyConstraint(['account'], ['hive_accounts.name'],
                                     name='hive_modlog_fk2'),
             sa.Index('hive_modlog_ix1', 'community', 'created_at'),
             mysql_engine='InnoDB',
             mysql_default_charset='utf8mb4')

    # Denormalized feed entries: which posts show in which account's feed.
    sa.Table(
        'hive_feed_cache',
        metadata,
        sa.Column('post_id', sa.Integer, nullable=False),
        sa.Column('account_id', sa.Integer, nullable=False),
        sa.Column('created_at', sa.DateTime, nullable=False),
        sa.UniqueConstraint('post_id',
                            'account_id',
                            name='hive_feed_cache_ux1'),  # core
        sa.Index('hive_feed_cache_ix1', 'account_id', 'post_id',
                 'created_at'),  # API (and rebuild?)
        mysql_engine='InnoDB',
        mysql_default_charset='utf8mb4')

    # Denormalized per-post cache used by the API: stats, UI fields,
    # payout info, ranking scores and the bulk body/votes/json payloads.
    sa.Table(
        'hive_posts_cache',
        metadata,
        sa.Column('post_id', sa.Integer, primary_key=True),
        sa.Column('author', VARCHAR(16), nullable=False),
        sa.Column('permlink', VARCHAR(255), nullable=False),
        sa.Column('category', VARCHAR(255), nullable=False, server_default=''),

        # important/index
        sa.Column('depth', SMALLINT, nullable=False, server_default='0'),
        sa.Column('children', SMALLINT, nullable=False, server_default='0'),

        # basic/extended-stats
        sa.Column('author_rep',
                  sa.Float(precision=6),
                  nullable=False,
                  server_default='0'),
        sa.Column('flag_weight',
                  sa.Float(precision=6),
                  nullable=False,
                  server_default='0'),
        sa.Column('total_votes',
                  sa.Integer,
                  nullable=False,
                  server_default='0'),
        sa.Column('up_votes', sa.Integer, nullable=False, server_default='0'),

        # basic ui fields
        sa.Column('title', sa.String(255), nullable=False, server_default=''),
        sa.Column('preview',
                  sa.String(1024),
                  nullable=False,
                  server_default=''),
        sa.Column('img_url',
                  sa.String(1024),
                  nullable=False,
                  server_default=''),

        # core stats/indexes
        sa.Column('payout',
                  sa.types.DECIMAL(10, 3),
                  nullable=False,
                  server_default='0'),
        sa.Column('promoted',
                  sa.types.DECIMAL(10, 3),
                  nullable=False,
                  server_default='0'),
        sa.Column('created_at',
                  sa.DateTime,
                  nullable=False,
                  server_default='1990-01-01'),
        sa.Column('payout_at',
                  sa.DateTime,
                  nullable=False,
                  server_default='1990-01-01'),
        sa.Column('updated_at',
                  sa.DateTime,
                  nullable=False,
                  server_default='1990-01-01'),
        sa.Column('is_paidout', BOOLEAN, nullable=False, server_default='0'),

        # ui flags/filters
        sa.Column('is_nsfw', BOOLEAN, nullable=False, server_default='0'),
        sa.Column('is_declined', BOOLEAN, nullable=False, server_default='0'),
        sa.Column('is_full_power', BOOLEAN, nullable=False,
                  server_default='0'),
        sa.Column('is_hidden', BOOLEAN, nullable=False, server_default='0'),
        sa.Column('is_grayed', BOOLEAN, nullable=False, server_default='0'),

        # important indexes
        sa.Column('rshares', sa.BigInteger, nullable=False,
                  server_default='0'),
        sa.Column('sc_trend',
                  sa.Float(precision=6),
                  nullable=False,
                  server_default='0'),
        sa.Column('sc_hot',
                  sa.Float(precision=6),
                  nullable=False,
                  server_default='0'),

        # bulk data
        sa.Column('body', TEXT),
        sa.Column('votes', TEXT),
        sa.Column('json', sa.Text),
        sa.Column('raw_json', sa.Text),
        sa.Index('hive_posts_cache_ix2',
                 'promoted',
                 postgresql_where=sql_text(
                     "is_paidout = '0' AND promoted > 0")),  # API
        sa.Index('hive_posts_cache_ix3',
                 'payout_at',
                 'post_id',
                 postgresql_where=sql_text("is_paidout = '0'")),  # core
        sa.Index('hive_posts_cache_ix6', 'sc_trend', 'post_id'),  # API
        sa.Index('hive_posts_cache_ix7', 'sc_hot', 'post_id'),  # API
        mysql_engine='InnoDB',
        mysql_default_charset='utf8mb4')

    # Single-row-per-block sync state: schema version and price feeds.
    sa.Table('hive_state',
             metadata,
             sa.Column('block_num',
                       sa.Integer,
                       primary_key=True,
                       autoincrement=False),
             sa.Column('db_version', sa.Integer, nullable=False),
             sa.Column('dpay_per_mvest',
                       sa.types.DECIMAL(8, 3),
                       nullable=False),
             sa.Column('usd_per_dpay', sa.types.DECIMAL(8, 3), nullable=False),
             sa.Column('bbd_per_dpay', sa.types.DECIMAL(8, 3), nullable=False),
             sa.Column('dgpo', sa.Text, nullable=False),
             mysql_engine='InnoDB',
             mysql_default_charset='utf8mb4')

    return metadata
# Example #3
    make = sa.Column(sa.Unicode(255), nullable=False)
    model = sa.Column(sa.Unicode(255), nullable=False)

    @transaction
    def add(cls, make, model):
        """Create a Truck with the given make/model and add it to the session.

        NOTE(review): decorated with ``@transaction`` — presumably the
        decorator supplies ``cls`` and handles commit/rollback; confirm
        against the decorator's definition.
        """
        o = cls(make=make, model=model)
        db.sess.add(o)
        return o

    @ignore_unique
    def add_iu(cls, make, model):
        """Same as ``add`` but unique-constraint violations are swallowed
        by the ``@ignore_unique`` decorator (no return value)."""
        cls.add(make, model)


# Enforce uniqueness of (make, model) pairs at the database level.
sa.Index('uidx_sabwp_truck_makemodel', Truck.make, Truck.model, unique=True)


class CustomerType(Base, LookupMixin):
    """Lookup table of customer types; columns come from ``LookupMixin``."""

    __tablename__ = 'sabwp_customer_types'


class HasUniqueValidation(Base, DefaultMixin):
    """Model with two independently unique columns (name, email)."""

    __tablename__ = 'sabwp_has_unique_val'

    name = sa.Column(sa.String(255), nullable=False, unique=True)
    email = sa.Column(sa.String(255), nullable=False, unique=True)

    # Registers application-level uniqueness validators for both columns
    # (in addition to the DB-level unique constraints above).
    validates_unique('name', 'email')

# Example #4
# Many-to-one: each ActionExecution belongs to at most one TaskExecution.
# ON DELETE CASCADE at the DB level removes children with their parent.
ActionExecution.task_execution_id = sa.Column(sa.String(36),
                                              sa.ForeignKey(
                                                  TaskExecution.id,
                                                  ondelete='CASCADE'),
                                              nullable=True)

# `passive_deletes=True` lets the DB-level cascade above do the deleting
# instead of the ORM loading and deleting children one by one.
TaskExecution.action_executions = relationship(
    ActionExecution,
    backref=backref('task_execution', remote_side=[TaskExecution.id]),
    cascade='all, delete-orphan',
    foreign_keys=ActionExecution.task_execution_id,
    lazy='select',
    passive_deletes=True)

# Index the FK column to speed up lookups of a task's action executions.
sa.Index('%s_task_execution_id' % ActionExecution.__tablename__,
         'task_execution_id')

# Many-to-one for 'WorkflowExecution' and 'TaskExecution'.
# Mirrors the ActionExecution wiring above: DB-level ON DELETE CASCADE.

WorkflowExecution.task_execution_id = sa.Column(sa.String(36),
                                                sa.ForeignKey(
                                                    TaskExecution.id,
                                                    ondelete='CASCADE'),
                                                nullable=True)

TaskExecution.workflow_executions = relationship(
    WorkflowExecution,
    backref=backref('task_execution', remote_side=[TaskExecution.id]),
    cascade='all, delete-orphan',
    foreign_keys=WorkflowExecution.task_execution_id,
    lazy='select',
# Example #5 — file: model.py, project: kwuenschel/tman
        else:
            return today.year - self.birthday.year - 1


class CompetitorWeight(Base):
    """A dated weight measurement recorded for a competitor."""

    __tablename__ = 'competitor_weight'
    id = sa.Column(sa.Integer, primary_key=True)
    # FK to the competitor; indexed for per-competitor history lookups.
    competitor_id = sa.Column(sa.Integer,
                              sa.ForeignKey('competitor.id'),
                              index=True)
    weightDate = sa.Column(sa.Date, nullable=False)
    weight = sa.Column(sa.Float, nullable=False)


# At most one weight entry per competitor per date.
sa.Index("competitor_weight_idx00",
         CompetitorWeight.competitor_id,
         CompetitorWeight.weightDate,
         unique=True)


class Tournament(Base):
    """A tournament with a date range and its registered competitors."""

    __tablename__ = 'tournament'
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String(200), nullable=False)
    start_date = sa.Column(sa.Date)
    end_date = sa.Column(sa.Date)
    # Eagerly loaded (lazy="joined"); removing an entry from this
    # collection deletes the association row (delete-orphan).
    competitors = orm.relationship(
        'TournamentCompetitor',
        cascade="all, delete-orphan",
        order_by="TournamentCompetitor.competitor_id",
        lazy="joined",
    )
def upgrade():
    """Create the ``export`` table and its ``export_audit`` shadow table.

    Both tables share the same column set and check constraint. Only the
    live table gets the extra uniqueness/index/foreign-key constraints,
    and only the audit table includes ``revision`` in its primary key.
    """
    table_name = 'export'
    audit_name = table_name + '_audit'

    for name in (table_name, audit_name):
        op.create_table(
            name,
            sa.Column('id',
                      sa.Integer,
                      primary_key=True,
                      autoincrement=True,
                      nullable=False),
            sa.Column('name', sa.Unicode, nullable=False),
            sa.Column('owner_user_id', sa.Integer, nullable=False),
            sa.Column('expand_collections', sa.Boolean, nullable=False),
            sa.Column('use_choice_labels', sa.Boolean, nullable=False),
            sa.Column('notify', sa.Boolean, nullable=False),
            sa.Column('status',
                      sa.Enum('failed',
                              'pending',
                              'complete',
                              name='export_status'),
                      nullable=False,
                      default='pending'),
            sa.Column('contents', JSON, nullable=False),
            sa.Column('create_user_id', sa.Integer, nullable=False),
            sa.Column('create_date',
                      sa.DateTime,
                      nullable=False,
                      server_default=sql.func.now()),
            sa.Column('modify_user_id', sa.Integer, nullable=False),
            sa.Column('modify_date',
                      sa.DateTime,
                      nullable=False,
                      server_default=sql.func.now()),
            # Only the audit table versions its rows, so `revision` is
            # part of the composite primary key there only.
            sa.Column('revision',
                      sa.Integer,
                      primary_key=('audit' in name),
                      nullable=False),
            sa.Index('ix_%s_create_user_id' % name, 'create_user_id'),
            sa.Index('ix_%s_modify_user_id' % name, 'modify_user_id'),
            # Both main/audit tables keep the same check constraint names
            sa.CheckConstraint('create_date <= modify_date',
                               name='ck_%s_valid_timeline' % table_name))

    # The live table will have some extra data integrity constraints
    op.create_unique_constraint('uq_%s_name' % table_name, 'export', ['name'])
    op.create_index('ix_%s_owner_user_id' % table_name, table_name,
                    ['owner_user_id'])
    # BUG FIX: this FK previously pointed at ['create_user_id'] (a
    # copy-paste of the constraint below); it must reference the
    # `owner_user_id` column it is named after and indexed on above.
    op.create_foreign_key('fk_%s_owner_user_id' % table_name,
                          table_name,
                          'user', ['owner_user_id'], ['id'],
                          ondelete='CASCADE')
    op.create_foreign_key('fk_%s_create_user_id' % table_name,
                          table_name,
                          'user', ['create_user_id'], ['id'],
                          ondelete='RESTRICT')
    op.create_foreign_key('fk_%s_modify_user_id' % table_name,
                          table_name,
                          'user', ['modify_user_id'], ['id'],
                          ondelete='RESTRICT')
# Example #7
def downgrade(migrate_engine):
    """Restore the user/project tables, columns, indexes and constraints.

    Reverses the corresponding upgrade: recreates the ``user`` and
    ``project`` tables, re-adds the user_id/project_id columns on
    ``sourceassoc``, and restores the per-table foreign keys described
    by ``INDEXES`` (backfilling referenced rows first).
    """
    meta = sa.MetaData(bind=migrate_engine)
    user = sa.Table(
        'user',
        meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    project = sa.Table(
        'project',
        meta,
        sa.Column('id', sa.String(255), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8',
    )

    tables = [project, user]
    # BUG FIX: sort by table name for a deterministic creation order.
    # The old `sorted(tables)` compared Table objects directly, which
    # raises TypeError on Python 3 (Table defines no ordering).
    for table in sorted(tables, key=lambda tbl: tbl.name):
        table.create()

    load_tables = dict((table_name, sa.Table(table_name, meta, autoload=True))
                       for table_name in TABLES)

    # Restore the sourceassoc columns and constraints
    sourceassoc = load_tables['sourceassoc']
    user_id = sa.Column('user_id', sa.String(255))
    project_id = sa.Column('project_id', sa.String(255))
    sourceassoc.create_column(user_id)
    sourceassoc.create_column(project_id)

    if migrate_engine.name != 'sqlite':
        # MySQL requires explicit constraint names; other backends can
        # generate their own.
        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
        uc = UniqueConstraint('sample_id',
                              'user_id',
                              table=sourceassoc,
                              **params)
        uc.create()

        params = {}
        if migrate_engine.name == "mysql":
            params = {'name': 'uniq_sourceassoc0sample_id'}
        uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
        uc.drop()

        idx = sa.Index('idx_su', sourceassoc.c.source_id,
                       sourceassoc.c.user_id)
        idx.create(bind=migrate_engine)
        idx = sa.Index('idx_sp', sourceassoc.c.source_id,
                       sourceassoc.c.project_id)
        idx.create(bind=migrate_engine)

    # Restore the user/project columns and constraints in all tables
    for table_name, indexes in INDEXES.items():
        table = load_tables[table_name]
        for column, ref_table_name, ref_column_name in indexes:
            ref_table = load_tables[ref_table_name]
            # Backfill the referenced table with any distinct, non-null
            # values it is missing, so the FK created below is satisfied.
            c = getattr(Alias(table).c, column)
            except_q = exists([getattr(ref_table.c, ref_column_name)])
            q = select([c]).where(and_(c != sa.null(), not_(except_q)))
            q = q.distinct()

            # NOTE(sileht): workaround for
            # https://bitbucket.org/zzzeek/sqlalchemy/
            # issue/3044/insert-from-select-union_all
            q.select = lambda: q

            sql_ins = ref_table.insert().from_select(
                [getattr(ref_table.c, ref_column_name)], q)
            try:
                migrate_engine.execute(sql_ins)
            except TypeError:
                # from select is empty
                pass

            if migrate_engine.name != 'sqlite':
                params = {
                    'columns': [table.c[column]],
                    'refcolumns': [ref_table.c[ref_column_name]]
                }

                if (migrate_engine.name == "mysql"
                        and table_name != 'alarm_history'):
                    params['name'] = "_".join(('fk', table_name, column))
                elif (migrate_engine.name == "postgresql"
                      and table_name == "sample"):
                    # The fk contains the old table name
                    params['name'] = "_".join(('meter', column, 'fkey'))

                fkey = ForeignKeyConstraint(**params)
                fkey.create()
# Example #8
class Questions(SqlAlchemyBase):
    """ORM model for a quiz question, linked to a subject and a type."""

    __tablename__ = 'questions'

    id = sqlalchemy.Column(
        sqlalchemy.Integer, primary_key=True, autoincrement=True)
    inner_id = sqlalchemy.Column(sqlalchemy.Integer)
    text = sqlalchemy.Column(sqlalchemy.UnicodeText)
    indexed_text = sqlalchemy.Column(sqlalchemy.UnicodeText)
    subject_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey("subjects.id"),
        nullable=False)
    subject = orm.relation('Subjects', backref='questions')
    type_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey("type_question.id"),
        nullable=False,
        default=1)
    type = orm.relation('TypeQuestion', backref='questions')
    ordered = sqlalchemy.Column(
        sqlalchemy.Boolean, nullable=False, default=False)
    completed = sqlalchemy.Column(
        sqlalchemy.Boolean, nullable=False, default=False)

    # Index over the pre-processed (search-ready) question text.
    text_index = sqlalchemy.Index('text_index', indexed_text)

    def __init__(self, text, subject_id, type_id=1, ordered=False,
                 completed=False, inner_id=None):
        self.text = text.strip()
        self.indexed_text = get_prepare_text(text)
        self.subject_id = subject_id
        self.type_id = type_id
        self.ordered = ordered
        self.completed = completed
        self.inner_id = inner_id

    @property
    def answers(self):
        """Distractors attached to this question that are marked correct."""
        return [d for d in self.distractors
                if d.question == self and d.correct]

    @property
    def name(self):
        """Short plain-text title: HTML tags stripped, truncated at a word."""
        plain = re.compile(r'<.*?>').sub('', self.text)
        limit = 50
        if len(plain) <= limit:
            return plain
        plain = plain[:limit]
        if ' ' in plain:
            plain = plain[:plain.rindex(' ')]
        return plain + ' ...'

    def __str__(self):
        return self.name

    def __repr__(self):
        return f'<Questions {self.id} "{self.name}">'
예제 #9
0
def upgrade(migrate_engine):
    """Create the test_result_* tables used to store per-step test results.

    :param migrate_engine: sqlalchemy-migrate engine the DDL runs against.
    """
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    # Stub declarations of tables that already exist in the schema; only
    # the primary keys referenced by the foreign keys below are declared.
    sautils.Table(
        'builds', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # ...
    )

    sautils.Table(
        'builders', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # ...
    )

    sautils.Table(
        'steps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # ...
    )

    # One row per set of results produced by a (builder, build, step).
    test_result_sets = sautils.Table(
        'test_result_sets', metadata,

        sa.Column('id', sa.Integer, primary_key=True),

        sa.Column('builderid', sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),

        sa.Column('buildid', sa.Integer,
                  sa.ForeignKey('builds.id', ondelete='CASCADE'),
                  nullable=False),

        sa.Column('stepid', sa.Integer,
                  sa.ForeignKey('steps.id', ondelete='CASCADE'),
                  nullable=False),

        sa.Column('description', sa.Text, nullable=True),

        sa.Column('category', sa.Text, nullable=False),

        sa.Column('value_unit', sa.Text, nullable=False),

        sa.Column('tests_passed', sa.Integer, nullable=True),

        sa.Column('tests_failed', sa.Integer, nullable=True),

        sa.Column('complete', sa.SmallInteger, nullable=False),
    )

    # Individual result values within a set; name/code-path are interned
    # in the test_names / test_code_paths tables below.
    test_results = sautils.Table(
        'test_results', metadata,

        sa.Column('id', sa.Integer, primary_key=True),

        sa.Column('builderid', sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),

        sa.Column('test_result_setid', sa.Integer,
                  sa.ForeignKey('test_result_sets.id', ondelete='CASCADE'),
                  nullable=False),

        sa.Column('test_nameid', sa.Integer,
                  sa.ForeignKey('test_names.id', ondelete='CASCADE'),
                  nullable=True),

        sa.Column('test_code_pathid', sa.Integer,
                  sa.ForeignKey('test_code_paths.id', ondelete='CASCADE'),
                  nullable=True),

        sa.Column('line', sa.Integer, nullable=True),

        sa.Column('duration_ns', sa.Integer, nullable=True),

        sa.Column('value', sa.Text, nullable=False),
    )

    # Interning table for test names, scoped per builder.
    test_names = sautils.Table(
        'test_names', metadata,

        sa.Column('id', sa.Integer, primary_key=True),

        sa.Column('builderid', sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),

        sa.Column('name', sa.Text, nullable=False),
    )

    # Interning table for source code paths, scoped per builder.
    test_code_paths = sautils.Table(
        'test_code_paths', metadata,

        sa.Column('id', sa.Integer, primary_key=True),

        sa.Column('builderid', sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),

        sa.Column('path', sa.Text, nullable=False),
    )

    # create the tables; test_results references test_result_sets,
    # test_names and test_code_paths, so it must be created last
    test_result_sets.create()
    test_names.create()
    test_code_paths.create()
    test_results.create()

    # create indexes; mysql_length gives MySQL a prefix length for the
    # TEXT columns, which it requires for indexing — TODO confirm intent
    idx = sa.Index('test_names_name', test_names.c.builderid, test_names.c.name,
                   mysql_length={'name': 255})
    idx.create()

    idx = sa.Index('test_code_paths_path', test_code_paths.c.builderid, test_code_paths.c.path,
                   mysql_length={'path': 255})
    idx.create()
예제 #10
0
    sa.Column('name', sa.String(255), unique=True),
    sa.Column('group', sa.String(255)),
    sa.Column('groups', postgresql.ARRAY(sa.String(255))),
    sa.Column('version', sa.Integer, nullable=True),
    sa.Column('comment', sa.Text),
    sa.Column('created_at',
              AwareDateTime,
              default=lambda: datetime.datetime.utcnow(),
              nullable=True),
    sa.Column(
        'updated_at',
        AwareDateTime,
        default=lambda: datetime.datetime.utcnow(),
        onupdate=lambda: datetime.datetime.utcnow(),
        nullable=True,
    ),
)

# Composite indexes over the switches table for flag lookups.
# NOTE(review): switches.c.is_active is defined above this view — confirm.
sa.Index('idx_name_group_is_active', switches.c.name, switches.c.group,
         switches.c.is_active)
sa.Index('idx_name_group_version_is_active', switches.c.name, switches.c.group,
         switches.c.version, switches.c.is_active)

# Association table linking users to switches; the unique constraint
# allows each (switch, user) pair to appear at most once.
user_switches = sa.Table(
    'user_switches',
    metadata,
    sa.Column('user_id', sa.Integer, sa.ForeignKey(models.users.c.id)),
    sa.Column('switch_id', sa.Integer, sa.ForeignKey(switches.c.id)),
    sa.UniqueConstraint('switch_id', 'user_id', name='user_switch_unique'),
)
예제 #11
0
def upgrade():
    """Create the notification schema: applications, endpoints, webhook
    properties, email subscriptions and email aggregation.

    NOTE(review): gen_random_uuid() as a server default suggests this
    migration targets PostgreSQL — confirm.
    """
    op.create_table(
        'applications',
        sa.Column('id',
                  UUID(),
                  primary_key=True,
                  server_default=sa.text("gen_random_uuid()")),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('description', sa.Unicode(), nullable=True),
        sa.Column('created',
                  sa.DateTime(timezone=True),
                  server_default=func.now(),
                  nullable=False),
        sa.Column('updated', sa.DateTime(timezone=True), onupdate=func.now()))

    # One notification endpoint per row, owned by an account.
    op.create_table(
        'endpoints',
        sa.Column('id',
                  UUID(),
                  primary_key=True,
                  server_default=sa.text("gen_random_uuid()")),
        sa.Column('account_id', sa.String(50), nullable=False, index=True),
        sa.Column('endpoint_type', sa.Integer(), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=False, default=False),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('description', sa.Unicode(), nullable=True),
        sa.Column('created',
                  sa.DateTime(timezone=True),
                  server_default=func.now(),
                  nullable=False),
        sa.Column('updated', sa.DateTime(timezone=True), onupdate=func.now()))

    # Could use JSON field in the endpoints table also for properties..
    # endpoint_id is indexed but carries no FK constraint here —
    # presumably enforced at the application layer; confirm.
    op.create_table(
        'endpoint_webhooks',
        sa.Column('id', sa.Integer(), primary_key=True),
        sa.Column('endpoint_id', UUID(), nullable=False, index=True),
        sa.Column('url', sa.Unicode(), nullable=False),
        sa.Column('method', sa.String(10),
                  nullable=False),  # We could use enum numbering
        sa.Column('disable_ssl_verification',
                  sa.Boolean(),
                  nullable=False,
                  default=False),
        sa.Column('secret_token', sa.String(255), nullable=True),
        sa.Column('payload_transformer', sa.String(50), nullable=True))

    # Composite primary key makes each subscription naturally unique.
    op.create_table(
        'endpoint_email_subscriptions',
        sa.Column('account_id',
                  sa.String(50),
                  nullable=False,
                  primary_key=True),
        sa.Column('user_id', sa.String(50), nullable=False, primary_key=True),
        sa.Column('event_type',
                  sa.String(50),
                  nullable=False,
                  primary_key=True)
        # sa.UniqueConstraint('account_id', 'user_id', 'event_type', name='unique_subscription_IX')
    )

    op.create_table(
        'email_aggregation',
        sa.Column('id', sa.Integer(), primary_key=True),
        sa.Column('account_id', sa.String(50), nullable=False),
        sa.Column('insight_id', sa.String(50), nullable=False),
        sa.Column('created',
                  sa.DateTime(timezone=True),
                  server_default=func.now(),
                  nullable=False),
        sa.Column('payload', JSONB, nullable=False),
        #     Needs to be an index of account_id + created as that's the search query
        sa.Index('IX_time_search_account_mails', 'account_id', 'created'))
예제 #12
0

def rel(table: str,
        populates: Optional[str] = None,
        secondary: Optional[sa.Table] = None):
    """Simplifies building a relation between tables.

    Optional keywords are forwarded to ``orm.relationship`` only when
    actually supplied, so its own defaults stay in effect otherwise.
    """
    extra = {}
    if populates:
        extra['back_populates'] = populates
    if secondary is not None:
        extra['secondary'] = secondary
    return orm.relationship(table, **extra)


@alchemy(sa.UniqueConstraint('email', 'name', 'type'),
         sa.Index('idx_name', 'name'))
class Address(Base):
    """Represents an email address"""
    __tablename__ = "address"

    id = sa.Column(sa.Integer, primary_key=True)
    type = sa.Column(sa.Integer, nullable=True)
    email = sa.Column(sa.String(320), nullable=True)
    name = sa.Column(sa.String(320), nullable=True)
    messages = rel('Email', 'contacts', _email_contact)

    # pylint: disable=dangerous-default-value
    @classmethod
    def fromdict(cls, email_address, cache=dict()):
        """Build an Address from a OML Address dict"""
        name = email_address.get('@OPFContactEmailAddressName', None)
    def _create_tables_thd(self, conn):
        """Build the pre-migration schema on *conn* and add its indexes."""
        metadata = sa.MetaData()
        metadata.bind = conn

        self._define_old_tables(metadata)

        metadata.create_all()

        # (index name, columns, unique?) — created in this exact order.
        index_specs = [
            ('builds_buildrequestid',
             [self.builds.c.buildrequestid], False),
            ('builds_number',
             [self.builds.c.builderid, self.builds.c.number], True),
            ('builds_buildslaveid',
             [self.builds.c.buildslaveid], False),
            ('builds_masterid',
             [self.builds.c.masterid], False),
            ('buildslaves_name',
             [self.buildslaves.c.name], True),
            ('configured_slaves_buildmasterid',
             [self.configured_buildslaves.c.buildermasterid], False),
            ('configured_slaves_slaves',
             [self.configured_buildslaves.c.buildslaveid], False),
            ('configured_slaves_identity',
             [self.configured_buildslaves.c.buildermasterid,
              self.configured_buildslaves.c.buildslaveid], True),
            ('connected_slaves_masterid',
             [self.connected_buildslaves.c.masterid], False),
            ('connected_slaves_slaves',
             [self.connected_buildslaves.c.buildslaveid], False),
            ('connected_slaves_identity',
             [self.connected_buildslaves.c.masterid,
              self.connected_buildslaves.c.buildslaveid], True),
        ]
        for name, columns, unique in index_specs:
            sa.Index(name, *columns, unique=unique).create()
예제 #14
0
        def setup_thd(conn):
            """Create the pre-migration builders/buildsets/buildrequests
            schema on *conn* and seed it with a few sample rows."""
            metadata = sa.MetaData()
            metadata.bind = conn

            builders = sa.Table(
                'builders',
                metadata,
                sa.Column('id', sa.Integer, primary_key=True),
                sa.Column('name', sa.Text, nullable=False),
                sa.Column('name_hash', sa.String(40), nullable=False),
            )
            builders.create()

            buildsets = sa.Table(
                'buildsets',
                metadata,
                sa.Column('id', sa.Integer, primary_key=True),
                sa.Column('external_idstring', sa.String(256)),
                sa.Column('reason', sa.String(256)),
                sa.Column('submitted_at', sa.Integer, nullable=False),
                sa.Column('complete',
                          sa.SmallInteger,
                          nullable=False,
                          server_default=sa.DefaultClause("0")),
                sa.Column('complete_at', sa.Integer),
                sa.Column('results', sa.SmallInteger),
                sa.Column('parent_buildid', sa.Integer),
                sa.Column('parent_relationship', sa.Text),
            )
            buildsets.create()

            # Pre-migration shape: requests reference builders by name,
            # not by a builderid foreign key (asserted at the bottom).
            buildrequests = sa.Table(
                'buildrequests',
                metadata,
                sa.Column('id', sa.Integer, primary_key=True),
                sa.Column('buildsetid',
                          sa.Integer,
                          sa.ForeignKey("buildsets.id"),
                          nullable=False),
                sa.Column('buildername', sa.String(length=256),
                          nullable=False),
                sa.Column('priority',
                          sa.Integer,
                          nullable=False,
                          server_default=sa.DefaultClause("0")),
                sa.Column('complete',
                          sa.Integer,
                          server_default=sa.DefaultClause("0")),
                sa.Column('results', sa.SmallInteger),
                sa.Column('submitted_at', sa.Integer, nullable=False),
                sa.Column('complete_at', sa.Integer),
                sa.Column('waited_for',
                          sa.SmallInteger,
                          server_default=sa.DefaultClause("0")),
            )
            buildrequests.create()

            # Indexes matching the production schema.
            idx = sa.Index('buildrequests_buildsetid',
                           buildrequests.c.buildsetid)
            idx.create()
            idx = sa.Index('buildrequests_buildername',
                           buildrequests.c.buildername)
            idx.create()
            idx = sa.Index('buildrequests_complete', buildrequests.c.complete)
            idx.create()
            idx = sa.Index('buildsets_complete', buildsets.c.complete)
            idx.create()
            idx = sa.Index('buildsets_submitted_at', buildsets.c.submitted_at)
            idx.create()

            # Seed rows: one buildset, one builder, three requests
            # (two for bldr1, one for the nonexistent bldr2).
            brargs = dict(buildsetid=10, priority=1, submitted_at=1234)
            conn.execute(buildsets.insert(), id=10, submitted_at=1233)
            conn.execute(builders.insert(),
                         id=20,
                         name='bldr1',
                         name_hash='88103b2fbeb05bdd81c066b58a11bcf9b0d29300')
            conn.execute(buildrequests.insert(),
                         id=30,
                         buildername='bldr1',
                         **brargs)
            conn.execute(buildrequests.insert(),
                         id=31,
                         buildername='bldr1',
                         **brargs)
            conn.execute(buildrequests.insert(),
                         id=32,
                         buildername='bldr2',
                         **brargs)
            # Sanity-check the pre-migration shape before migrating.
            self.assertTrue(hasattr(buildrequests.c, 'buildername'))
            self.assertFalse(hasattr(buildrequests.c, 'builderid'))
예제 #15
0
파일: models.py 프로젝트: noah8713/rally
class Workload(BASE, RallyBase):
    """DB model for a single workload run belonging to a subtask."""

    __tablename__ = "workloads"
    __table_args__ = (sa.Index("workload_uuid", "uuid", unique=True), )

    id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
    # External identifier; generated by the UUID default callable.
    uuid = sa.Column(sa.String(36), default=UUID, nullable=False)

    task_uuid = sa.Column(
        sa.String(36),
        sa.ForeignKey(Task.uuid),
        nullable=False,
    )

    subtask_uuid = sa.Column(
        sa.String(36),
        sa.ForeignKey(Subtask.uuid),
        nullable=False,
    )

    # Join is on the subtask UUID rather than its integer PK.
    subtask = sa.orm.relationship(
        Subtask,
        backref=sa.orm.backref("workloads"),
        foreign_keys=subtask_uuid,
        primaryjoin=(subtask_uuid == Subtask.uuid),
    )

    name = sa.Column(sa.String(64), nullable=False)
    description = sa.Column(sa.Text, default="")
    position = sa.Column(sa.Integer, default=0, nullable=False)

    # NOTE(review): default={} / default=[] are shared mutable objects used
    # as column defaults — presumably safe because the sa_types wrappers
    # serialize per-insert; confirm.
    runner = sa.Column(sa_types.JSONEncodedDict, default={}, nullable=False)

    runner_type = sa.Column(sa.String(64), nullable=False)

    contexts = sa.Column(sa_types.JSONEncodedDict, default={}, nullable=False)

    contexts_results = sa.Column(sa_types.MutableJSONEncodedList,
                                 default=[],
                                 nullable=False)

    sla = sa.Column(sa_types.JSONEncodedDict, default={}, nullable=False)

    sla_results = sa.Column(sa_types.MutableJSONEncodedDict,
                            default={},
                            nullable=False)

    args = sa.Column(sa_types.JSONEncodedDict, default={}, nullable=False)

    hooks = sa.Column(sa_types.JSONEncodedList, default=[], nullable=False)

    start_time = sa.Column(sa_types.TimeStamp)

    # Aggregated timing / iteration statistics for the run.
    load_duration = sa.Column(sa.Float, default=0.0)
    full_duration = sa.Column(sa.Float, default=0.0)
    min_duration = sa.Column(sa.Float)
    max_duration = sa.Column(sa.Float)
    total_iteration_count = sa.Column(sa.Integer, default=0)
    failed_iteration_count = sa.Column(sa.Integer, default=0)

    statistics = sa.Column(sa_types.MutableJSONEncodedDict,
                           default={},
                           nullable=False)

    pass_sla = sa.Column(sa.Boolean, default=True)
    # Deferred: profiling payload can be large, load it only on access.
    _profiling_data = sa.orm.deferred(sa.Column(sa.Text, default=""))
예제 #16
0
        foreign_keys=[filter_id],
        back_populates="candidates",
        doc="The filter that the Candidate passed",
    )
    passed_at = sa.Column(
        sa.DateTime,
        nullable=False,
        index=True,
        doc="ISO UTC time when the Candidate passed the Filter.",
    )
    passing_alert_id = sa.Column(
        sa.BigInteger,
        index=True,
        doc="ID of the latest Stream alert that passed the Filter.",
    )
    uploader_id = sa.Column(
        sa.ForeignKey("users.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
        doc="ID of the user that posted the candidate",
    )


# Main lookup index; unique=True means an (obj, filter, passed_at)
# combination can be recorded at most once.
Candidate.__table_args__ = (sa.Index(
    "candidates_main_index",
    Candidate.obj_id,
    Candidate.filter_id,
    Candidate.passed_at,
    unique=True,
), )
예제 #17
0
파일: schema.py 프로젝트: zuik/dagster
# Metadata object that owns all schedule-storage tables below.
ScheduleStorageSqlMetadata = db.MetaData()

# One row per job (schedule/sensor) definition; job_origin_id is unique.
JobTable = db.Table(
    "jobs",
    ScheduleStorageSqlMetadata,
    db.Column("id", db.Integer, primary_key=True, autoincrement=True),
    db.Column("job_origin_id", db.String(255), unique=True),
    db.Column("repository_origin_id", db.String(255)),
    db.Column("status", db.String(63)),
    db.Column("job_type", db.String(63), index=True),
    db.Column("job_body", db.Text),
    db.Column("create_timestamp", db.DateTime, server_default=get_current_timestamp()),
    db.Column("update_timestamp", db.DateTime, server_default=get_current_timestamp()),
)

# One row per tick (evaluation) of a job; many ticks per job_origin_id.
JobTickTable = db.Table(
    "job_ticks",
    ScheduleStorageSqlMetadata,
    db.Column("id", db.Integer, primary_key=True, autoincrement=True),
    db.Column("job_origin_id", db.String(255), index=True),
    db.Column("status", db.String(63)),
    db.Column("type", db.String(63)),
    db.Column("timestamp", db.types.TIMESTAMP),
    db.Column("tick_body", db.Text),
    db.Column("create_timestamp", db.DateTime, server_default=get_current_timestamp()),
    db.Column("update_timestamp", db.DateTime, server_default=get_current_timestamp()),
)

# Composite indexes for the tick queries (by job + status / timestamp).
db.Index("idx_job_tick_status", JobTickTable.c.job_origin_id, JobTickTable.c.status)
db.Index("idx_job_tick_timestamp", JobTickTable.c.job_origin_id, JobTickTable.c.timestamp)
예제 #18
0
def upgrade(migrate_engine):
    """Create the initial identity schema: all core tables, their unique
    constraints, indexes and foreign keys, plus the default domain row.

    :param migrate_engine: sqlalchemy-migrate engine the DDL runs against.
    """
    meta = sql.MetaData()
    meta.bind = migrate_engine

    if migrate_engine.name == 'mysql':
        # In Folsom we explicitly converted migrate_version to UTF8.
        migrate_engine.execute(
            'ALTER TABLE migrate_version CONVERT TO CHARACTER SET utf8')
        # Set default DB charset to UTF8.
        migrate_engine.execute(
            'ALTER DATABASE %s DEFAULT CHARACTER SET utf8' %
            migrate_engine.url.database)

    # Table definitions (InnoDB + UTF8 on MySQL).
    credential = sql.Table(
        'credential', meta,
        sql.Column('id', sql.String(length=64), primary_key=True),
        sql.Column('user_id', sql.String(length=64), nullable=False),
        sql.Column('project_id', sql.String(length=64)),
        sql.Column('blob', ks_sql.JsonBlob, nullable=False),
        sql.Column('type', sql.String(length=255), nullable=False),
        sql.Column('extra', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    domain = sql.Table(
        'domain', meta,
        sql.Column('id', sql.String(length=64), primary_key=True),
        sql.Column('name', sql.String(length=64), nullable=False),
        sql.Column('enabled', sql.Boolean, default=True, nullable=False),
        sql.Column('extra', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    endpoint = sql.Table(
        'endpoint', meta,
        sql.Column('id', sql.String(length=64), primary_key=True),
        sql.Column('legacy_endpoint_id', sql.String(length=64)),
        sql.Column('interface', sql.String(length=8), nullable=False),
        sql.Column('region', sql.String(length=255)),
        sql.Column('service_id', sql.String(length=64), nullable=False),
        sql.Column('url', sql.Text, nullable=False),
        sql.Column('extra', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    group = sql.Table(
        'group', meta,
        sql.Column('id', sql.String(length=64), primary_key=True),
        sql.Column('domain_id', sql.String(length=64), nullable=False),
        sql.Column('name', sql.String(length=64), nullable=False),
        sql.Column('description', sql.Text),
        sql.Column('extra', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    # Grant (membership metadata) tables; composite primary keys.
    group_domain_metadata = sql.Table(
        'group_domain_metadata', meta,
        sql.Column('group_id', sql.String(length=64), primary_key=True),
        sql.Column('domain_id', sql.String(length=64), primary_key=True),
        sql.Column('data', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    group_project_metadata = sql.Table(
        'group_project_metadata', meta,
        sql.Column('group_id', sql.String(length=64), primary_key=True),
        sql.Column('project_id', sql.String(length=64), primary_key=True),
        sql.Column('data', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    policy = sql.Table(
        'policy', meta,
        sql.Column('id', sql.String(length=64), primary_key=True),
        sql.Column('type', sql.String(length=255), nullable=False),
        sql.Column('blob', ks_sql.JsonBlob, nullable=False),
        sql.Column('extra', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    project = sql.Table(
        'project', meta,
        sql.Column('id', sql.String(length=64), primary_key=True),
        sql.Column('name', sql.String(length=64), nullable=False),
        sql.Column('extra', ks_sql.JsonBlob.impl),
        sql.Column('description', sql.Text),
        sql.Column('enabled', sql.Boolean),
        sql.Column('domain_id', sql.String(length=64), nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    role = sql.Table(
        'role', meta,
        sql.Column('id', sql.String(length=64), primary_key=True),
        sql.Column('name', sql.String(length=255), nullable=False),
        sql.Column('extra', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    service = sql.Table(
        'service', meta,
        sql.Column('id', sql.String(length=64), primary_key=True),
        sql.Column('type', sql.String(length=255)),
        sql.Column('extra', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    token = sql.Table(
        'token', meta,
        sql.Column('id', sql.String(length=64), primary_key=True),
        sql.Column('expires', sql.DateTime, default=None),
        sql.Column('extra', ks_sql.JsonBlob.impl),
        sql.Column('valid', sql.Boolean, default=True, nullable=False),
        sql.Column('trust_id', sql.String(length=64)),
        sql.Column('user_id', sql.String(length=64)),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    trust = sql.Table(
        'trust', meta,
        sql.Column('id', sql.String(length=64), primary_key=True),
        sql.Column('trustor_user_id', sql.String(length=64), nullable=False),
        sql.Column('trustee_user_id', sql.String(length=64), nullable=False),
        sql.Column('project_id', sql.String(length=64)),
        sql.Column('impersonation', sql.Boolean, nullable=False),
        sql.Column('deleted_at', sql.DateTime),
        sql.Column('expires_at', sql.DateTime),
        sql.Column('extra', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    trust_role = sql.Table(
        'trust_role', meta,
        sql.Column('trust_id', sql.String(length=64), primary_key=True,
                   nullable=False),
        sql.Column('role_id', sql.String(length=64), primary_key=True,
                   nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    user = sql.Table(
        'user', meta,
        sql.Column('id', sql.String(length=64), primary_key=True),
        sql.Column('name', sql.String(length=255), nullable=False),
        sql.Column('extra', ks_sql.JsonBlob.impl),
        sql.Column('password', sql.String(length=128)),
        sql.Column('enabled', sql.Boolean),
        sql.Column('domain_id', sql.String(length=64), nullable=False),
        sql.Column('default_project_id', sql.String(length=64)),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    user_domain_metadata = sql.Table(
        'user_domain_metadata', meta,
        sql.Column('user_id', sql.String(length=64), primary_key=True),
        sql.Column('domain_id', sql.String(length=64), primary_key=True),
        sql.Column('data', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    user_group_membership = sql.Table(
        'user_group_membership', meta,
        sql.Column('user_id', sql.String(length=64), primary_key=True),
        sql.Column('group_id', sql.String(length=64), primary_key=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    user_project_metadata = sql.Table(
        'user_project_metadata', meta,
        sql.Column('user_id', sql.String(length=64), primary_key=True),
        sql.Column('project_id', sql.String(length=64), primary_key=True),
        sql.Column('data', ks_sql.JsonBlob.impl),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    # create all tables
    tables = [credential, domain, endpoint, group, group_domain_metadata,
              group_project_metadata, policy, project, role, service,
              token, trust, trust_role, user, user_domain_metadata,
              user_group_membership, user_project_metadata]

    for table in tables:
        try:
            table.create()
        except Exception:
            LOG.exception('Exception while creating table: %r', table)
            raise

    # Unique Constraints
    migrate.UniqueConstraint(user.c.domain_id,
                             user.c.name,
                             name='ixu_user_name_domain_id').create()
    migrate.UniqueConstraint(group.c.domain_id,
                             group.c.name,
                             name='ixu_group_name_domain_id').create()
    migrate.UniqueConstraint(role.c.name,
                             name='ixu_role_name').create()
    migrate.UniqueConstraint(project.c.domain_id,
                             project.c.name,
                             name='ixu_project_name_domain_id').create()
    migrate.UniqueConstraint(domain.c.name,
                             name='ixu_domain_name').create()

    # Indexes
    sql.Index('ix_token_expires', token.c.expires).create()
    sql.Index('ix_token_expires_valid', token.c.expires,
              token.c.valid).create()

    # Foreign keys, added after table creation so ordering above is free.
    fkeys = [
        {'columns': [user_project_metadata.c.project_id],
         'references': [project.c.id],
         'name': 'fk_user_project_metadata_project_id'},

        {'columns': [user_domain_metadata.c.domain_id],
         'references': [domain.c.id],
         'name': 'fk_user_domain_metadata_domain_id'},

        {'columns': [group_project_metadata.c.project_id],
         'references': [project.c.id],
         'name': 'fk_group_project_metadata_project_id'},

        {'columns': [group_domain_metadata.c.domain_id],
         'references': [domain.c.id],
         'name': 'fk_group_domain_metadata_domain_id'},

        {'columns': [endpoint.c.service_id],
         'references': [service.c.id]},

        {'columns': [user_group_membership.c.group_id],
         'references': [group.c.id],
         'name': 'fk_user_group_membership_group_id'},

        {'columns': [user_group_membership.c.user_id],
         'references':[user.c.id],
         'name': 'fk_user_group_membership_user_id'},

        {'columns': [user.c.domain_id],
         'references': [domain.c.id],
         'name': 'fk_user_domain_id'},

        {'columns': [group.c.domain_id],
         'references': [domain.c.id],
         'name': 'fk_group_domain_id'},

        {'columns': [project.c.domain_id],
         'references': [domain.c.id],
         'name': 'fk_project_domain_id'}
    ]

    for fkey in fkeys:
        migrate.ForeignKeyConstraint(columns=fkey['columns'],
                                     refcolumns=fkey['references'],
                                     name=fkey.get('name')).create()

    # Create the default domain.
    session = orm.sessionmaker(bind=migrate_engine)()
    # NOTE(review): the insert executes via the table's bound engine, not
    # through this session — the session/commit looks redundant; confirm.
    domain.insert(migration_helpers.get_default_domain()).execute()
    session.commit()
def upgrade():
    """Migrate VMM/physical domains to the extended schema.

    Adds the ``aim_vmm_policies`` table, then rebuilds ``aim_vmm_domains``
    and ``aim_physical_domains`` with their new columns, carrying the
    pre-existing rows over through the AIM manager (flagged as monitored).
    """
    op.create_table(
        'aim_vmm_policies', sa.Column('type', sa.String(64), nullable=False),
        sa.Column('aim_id', sa.Integer, autoincrement=True),
        sa.Column('display_name', sa.String(256), nullable=False, default=''),
        sa.Column('monitored', sa.Boolean, nullable=False, default=False),
        sa.PrimaryKeyConstraint('aim_id'))

    session = api.get_session(expire_on_commit=True)
    # Lightweight handles onto the legacy tables: only the columns we read.
    legacy_vmm = sa.Table('aim_vmm_domains', sa.MetaData(),
                          sa.Column('type', sa.String(64), nullable=False),
                          sa.Column('name', sa.String(64), nullable=False))
    legacy_phys = sa.Table('aim_physical_domains', sa.MetaData(),
                           sa.Column('name', sa.String(64), nullable=False))

    mgr = aim_manager.AimManager()
    ctx = context.AimContext(db_session=session)
    # Snapshot the existing rows before the old tables are dropped.
    with session.begin(subtransactions=True):
        saved_vmms = [resource.VMMDomain(type=row.type, name=row.name,
                                         monitored=True)
                      for row in session.query(legacy_vmm).all()]
        saved_phys = [resource.PhysicalDomain(name=row.name, monitored=True)
                      for row in session.query(legacy_phys).all()]

    op.drop_table('aim_vmm_domains')
    op.drop_table('aim_physical_domains')

    op.create_table(
        'aim_vmm_domains', sa.Column('type', sa.String(64), nullable=False),
        sa.Column('name', sa.String(64), nullable=False),
        sa.Column('aim_id', sa.Integer, autoincrement=True),
        sa.Column('display_name', sa.String(256), nullable=False, default=''),
        sa.Column('monitored', sa.Boolean, nullable=False, default=False),
        sa.Column('enforcement_pref', sa.Enum('sw', 'hw', 'unknown')),
        sa.Column('mode', sa.Enum('default', 'n1kv', 'unknown', 'ovs', 'k8s')),
        sa.Column('mcast_address', sa.String(64)),
        sa.Column('encap_mode', sa.Enum('unknown', 'vlan', 'vxlan')),
        sa.Column('pref_encap_mode', sa.Enum('unspecified', 'vlan', 'vxlan')),
        sa.Column('vlan_pool_name', sa.String(64)),
        sa.Column('vlan_pool_type', sa.Enum('static', 'dynamic')),
        sa.Column('mcast_addr_pool_name', sa.String(64)),
        sa.PrimaryKeyConstraint('aim_id'),
        sa.UniqueConstraint('type',
                            'name',
                            name='uniq_aim_vmm_domains_identity'),
        sa.Index('idx_aim_vmm_domains_identity', 'type', 'name'))

    op.create_table(
        'aim_physical_domains',
        sa.Column('name', sa.String(64), nullable=False),
        sa.Column('aim_id', sa.Integer, autoincrement=True),
        sa.Column('display_name', sa.String(256), nullable=False, default=''),
        sa.Column('monitored', sa.Boolean, nullable=False, default=False),
        sa.PrimaryKeyConstraint('aim_id'))

    # Re-insert the snapshotted rows through the manager so the new-schema
    # defaults and manager-side hooks apply.
    with session.begin(subtransactions=True):
        for domain_obj in saved_vmms + saved_phys:
            mgr.create(ctx, domain_obj)
class Metric(Base, GnocchiBase, storage.Metric):
    """SQLAlchemy mapping for a metric row.

    Column order is significant for the emitted DDL, so the column
    definitions below are kept in their original order.
    """

    __tablename__ = 'metric'
    __table_args__ = (
        sqlalchemy.Index('ix_metric_status', 'status'),
        sqlalchemy.UniqueConstraint("resource_id",
                                    "name",
                                    name="uniq_metric0resource_id0name"),
        COMMON_TABLES_ARGS,
    )

    id = sqlalchemy.Column(sqlalchemy_utils.UUIDType(), primary_key=True)
    archive_policy_name = sqlalchemy.Column(
        sqlalchemy.String(255),
        sqlalchemy.ForeignKey(
            'archive_policy.name',
            ondelete="RESTRICT",
            name="fk_metric_archive_policy_name_archive_policy_name"),
        nullable=False)
    archive_policy = sqlalchemy.orm.relationship(ArchivePolicy, lazy="joined")
    created_by_user_id = sqlalchemy.Column(sqlalchemy.String(255))
    created_by_project_id = sqlalchemy.Column(sqlalchemy.String(255))
    resource_id = sqlalchemy.Column(
        sqlalchemy_utils.UUIDType(),
        sqlalchemy.ForeignKey('resource.id',
                              ondelete="SET NULL",
                              name="fk_metric_resource_id_resource_id"))
    name = sqlalchemy.Column(sqlalchemy.String(255))
    status = sqlalchemy.Column(sqlalchemy.Enum('active',
                                               'delete',
                                               name="metric_status_enum"),
                               nullable=False,
                               server_default='active')

    def jsonify(self):
        """Return a dict view of the metric.

        Relationships that are already loaded are inlined; unloaded ones
        are represented by their foreign-key column instead.
        """
        unloaded = sqlalchemy.inspect(self).unloaded
        d = {
            "id": self.id,
            "created_by_user_id": self.created_by_user_id,
            "created_by_project_id": self.created_by_project_id,
            "name": self.name,
        }
        if 'resource' not in unloaded:
            d['resource'] = self.resource
        else:
            d['resource_id'] = self.resource_id
        if 'archive_policy' not in unloaded:
            d['archive_policy'] = self.archive_policy
        else:
            d['archive_policy_name'] = self.archive_policy_name
        return d

    def __eq__(self, other):
        # NOTE(jd) If `other` is a SQL Metric, we only compare
        # archive_policy_name, and we don't compare archive_policy that might
        # not be loaded. Otherwise we fallback to the original comparison for
        # storage.Metric.
        if isinstance(other, Metric):
            fields = ('id', 'archive_policy_name', 'created_by_user_id',
                      'created_by_project_id', 'name', 'resource_id')
            if all(getattr(self, f) == getattr(other, f) for f in fields):
                return True
        return storage.Metric.__eq__(self, other)

    __hash__ = storage.Metric.__hash__
Example #21
0
File: models.py  Project: Treora/h
 def __table_args__(self):
     """Declare a per-subclass index on the ``uri`` column."""
     index_name = 'subs_uri_idx_%s' % self.__tablename__
     return (sa.Index(index_name, 'uri'),)
Example #22
0
def join_model(
    join_table,
    model_1,
    model_2,
    column_1=None,
    column_2=None,
    fk_1="id",
    fk_2="id",
    base=Base,
):
    """Build an association model class for a many-to-many relationship.

    Parameters
    ----------
    join_table : str
        Name of the new table to be created.
    model_1 : str
        First model in the relationship.
    model_2 : str
        Second model in the relationship.
    column_1 : str, optional
        Join-table column referring to `model_1`. When `None`, the table
        name with its last character stripped plus ``_id`` is used
        (e.g. ``user_id`` for ``users``).
    column_2 : str, optional
        Join-table column referring to `model_2`; same default scheme as
        `column_1`.
    fk_1 : str, optional
        Column on `model_1` that the foreign key should refer to.
    fk_2 : str, optional
        Column on `model_2` that the foreign key should refer to.
    base : sqlalchemy.ext.declarative.api.DeclarativeMeta
        SQLAlchemy model base to subclass.

    Returns
    -------
    sqlalchemy.ext.declarative.api.DeclarativeMeta
        SQLAlchemy association model class
    """
    table_1 = model_1.__tablename__
    table_2 = model_2.__tablename__
    if column_1 is None:
        column_1 = f"{table_1[:-1]}_id"
    if column_2 is None:
        column_2 = f"{table_2[:-1]}_id"

    forward_ind_name = f"{join_table}_forward_ind"
    reverse_ind_name = f"{join_table}_reverse_ind"

    # Foreign-key columns pointing at each side of the relationship.
    col_1 = sa.Column(
        column_1,
        sa.ForeignKey(f"{table_1}.{fk_1}", ondelete="CASCADE"),
        nullable=False,
    )
    col_2 = sa.Column(
        column_2,
        sa.ForeignKey(f"{table_2}.{fk_2}", ondelete="CASCADE"),
        nullable=False,
    )

    model_attrs = {
        "__tablename__": join_table,
        "id": sa.Column(sa.Integer, primary_key=True,
                        doc="Unique object identifier."),
        column_1: col_1,
        column_2: col_2,
    }

    # Relationship accessors, named after the lower-cased model names.
    model_attrs[model_1.__name__.lower()] = relationship(
        model_1,
        cascade="save-update, merge, refresh-expire, expunge",
        foreign_keys=[col_1],
    )
    model_attrs[model_2.__name__.lower()] = relationship(
        model_2,
        cascade="save-update, merge, refresh-expire, expunge",
        foreign_keys=[col_2],
    )

    # Unique forward index over the pair, plus a non-unique reverse index.
    model_attrs[forward_ind_name] = sa.Index(
        forward_ind_name, col_1, col_2, unique=True)
    model_attrs[reverse_ind_name] = sa.Index(reverse_ind_name, col_2, col_1)

    model = type(model_1.__name__ + model_2.__name__, (base, JoinModel),
                 model_attrs)
    model.read = model.create = AccessibleIfRelatedRowsAreAccessible(**{
        model_1.__name__.lower(): "read",
        model_2.__name__.lower(): "read",
    })
    return model
    def create_tables_thd(self, conn):
        """Create the pre-migration schema used by this test.

        Builds the ``changes``, ``schedulers``, ``users``, ``objects`` and
        ``object_state`` tables on *conn*, then the indexes present in the
        real schema (most are skipped on SQLite, matching the migrations).

        Fix: ``objects`` and ``object_state`` previously called
        ``.create()`` with no bind, silently relying on ``metadata.bind``;
        all five tables now pass ``bind=conn`` explicitly and consistently.
        """
        metadata = sa.MetaData()
        metadata.bind = conn

        self.changes = sa.Table(
            'changes',
            metadata,
            sa.Column('changeid', sa.Integer, primary_key=True),
            sa.Column('author', sa.String(256), nullable=False),
            sa.Column('comments', sa.String(1024), nullable=False),
            sa.Column('is_dir', sa.SmallInteger, nullable=False),
            sa.Column('branch', sa.String(256)),
            sa.Column('revision', sa.String(256)),
            sa.Column('revlink', sa.String(256)),
            sa.Column('when_timestamp', sa.Integer, nullable=False),
            sa.Column('category', sa.String(256)),
            sa.Column('repository',
                      sa.String(length=512),
                      nullable=False,
                      server_default=''),
            sa.Column('project',
                      sa.String(length=512),
                      nullable=False,
                      server_default=''),
        )
        self.changes.create(bind=conn)

        self.schedulers = sa.Table(
            "schedulers",
            metadata,
            sa.Column('schedulerid', sa.Integer, primary_key=True),
            sa.Column('name', sa.String(128), nullable=False),
            sa.Column('class_name', sa.String(128), nullable=False),
        )
        self.schedulers.create(bind=conn)

        self.users = sa.Table(
            "users",
            metadata,
            sa.Column("uid", sa.Integer, primary_key=True),
            sa.Column("identifier", sa.String(256), nullable=False),
            sa.Column("bb_username", sa.String(128)),
            sa.Column("bb_password", sa.String(128)),
        )
        self.users.create(bind=conn)

        self.objects = sa.Table(
            "objects",
            metadata,
            sa.Column("id", sa.Integer, primary_key=True),
            sa.Column('name', sa.String(128), nullable=False),
            sa.Column('class_name', sa.String(128), nullable=False),
        )
        # Bind explicitly, like the other tables above.
        self.objects.create(bind=conn)

        self.object_state = sa.Table(
            "object_state",
            metadata,
            sa.Column("objectid",
                      sa.Integer,
                      sa.ForeignKey('objects.id'),
                      nullable=False),
            sa.Column("name", sa.String(length=256), nullable=False),
            sa.Column("value_json", sa.Text, nullable=False),
        )
        self.object_state.create(bind=conn)

        # these indices should already exist everywhere but on sqlite
        if conn.dialect.name != 'sqlite':
            sa.Index('name_and_class', self.schedulers.c.name,
                     self.schedulers.c.class_name).create()
            sa.Index('changes_branch', self.changes.c.branch).create()
            sa.Index('changes_revision', self.changes.c.revision).create()
            sa.Index('changes_author', self.changes.c.author).create()
            sa.Index('changes_category', self.changes.c.category).create()
            sa.Index('changes_when_timestamp',
                     self.changes.c.when_timestamp).create()

        # create this index without the unique attribute
        sa.Index('users_identifier', self.users.c.identifier).create()
Example #24
0
def upgrade(migrate_engine):
    """Remove the schedulers-era tables and rebuild ``scheduler_changes``.

    ``schedulers`` and ``scheduler_upstream_buildsets`` are dropped for
    good; ``scheduler_changes`` is dropped and recreated keyed on the
    ``objects`` table instead, along with its foreign keys and indexes.
    Schedulers re-populate ``scheduler_changes`` on startup.
    """
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    # autoload the tables that are only referenced here.
    # Fix: `objects` previously autoloaded the 'buildsets' table, so the
    # explicit ForeignKeyConstraint below pointed scheduler_changes.objectid
    # at buildsets.id, contradicting the sa.ForeignKey('objects.id') in the
    # recreated table definition.
    changes = sa.Table('changes', metadata, autoload=True)
    objects = sa.Table("objects", metadata, autoload=True)

    # drop all tables.  Schedulers will re-populate on startup

    # Partial definitions are enough for a DROP; columns listed are tokens.
    scheduler_changes_tbl = sa.Table(
        'scheduler_changes',
        metadata,
        sa.Column('schedulerid', sa.Integer),
        # ...
    )
    scheduler_changes_tbl.drop()
    metadata.remove(scheduler_changes_tbl)

    scheduler_upstream_buildsets_tbl = sa.Table(
        'scheduler_upstream_buildsets',
        metadata,
        sa.Column('buildsetid', sa.Integer),
        # ...
    )
    scheduler_upstream_buildsets_tbl.drop()
    metadata.remove(scheduler_upstream_buildsets_tbl)

    schedulers_tbl = sa.Table(
        "schedulers",
        metadata,
        sa.Column('schedulerid', sa.Integer),
        # ...
    )
    schedulers_tbl.drop()
    metadata.remove(schedulers_tbl)

    # schedulers and scheduler_upstream_buildsets aren't coming back, but
    # scheduler_changes is -- along with its indexes

    scheduler_changes_tbl = sa.Table(
        'scheduler_changes',
        metadata,
        sa.Column('objectid', sa.Integer, sa.ForeignKey('objects.id')),
        sa.Column('changeid', sa.Integer, sa.ForeignKey('changes.changeid')),
        sa.Column('important', sa.Integer),
    )
    scheduler_changes_tbl.create()

    # Explicit FK constraints (some dialects need them added separately).
    cons = constraint.ForeignKeyConstraint([scheduler_changes_tbl.c.objectid],
                                           [objects.c.id])
    cons.create()

    cons = constraint.ForeignKeyConstraint([scheduler_changes_tbl.c.changeid],
                                           [changes.c.changeid])
    cons.create()

    idx = sa.Index('scheduler_changes_objectid',
                   scheduler_changes_tbl.c.objectid)
    idx.create()

    idx = sa.Index('scheduler_changes_changeid',
                   scheduler_changes_tbl.c.changeid)
    idx.create()

    idx = sa.Index('scheduler_changes_unique',
                   scheduler_changes_tbl.c.objectid,
                   scheduler_changes_tbl.c.changeid,
                   unique=True)
    idx.create()
Example #25
0
File: model.py  Project: levitte/buildbot
class Model(base.DBConnectorComponent):
    #
    # schema
    #

    metadata = sa.MetaData()

    # NOTES

    # * server_defaults here are included to match those added by the migration
    #   scripts, but they should not be depended on - all code accessing these
    #   tables should supply default values as necessary.  The defaults are
    #   required during migration when adding non-nullable columns to existing
    #   tables.
    #
    # * dates are stored as unix timestamps (UTC-ish epoch time)
    #
    # * sqlalchemy does not handle sa.Boolean very well on MySQL or Postgres;
    #   use sa.SmallInteger instead
    #
    # * BuildRequest.canBeCollapsed() depends on buildrequest.id being auto-incremented which is
    #   sqlalchemy default.

    # Tables related to build requests
    # --------------------------------

    # A BuildRequest is a request for a particular build to be performed.  Each
    # BuildRequest is a part of a Buildset.  BuildRequests are claimed by
    # masters, to avoid multiple masters running the same build.
    buildrequests = sautils.Table(
        'buildrequests',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('buildsetid',
                  sa.Integer,
                  sa.ForeignKey('buildsets.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('builderid',
                  sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('priority',
                  sa.Integer,
                  nullable=False,
                  server_default=sa.DefaultClause("0")),

        # if this is zero, then the build is still pending
        sa.Column('complete', sa.Integer,
                  server_default=sa.DefaultClause("0")),

        # results is only valid when complete == 1; 0 = SUCCESS, 1 = WARNINGS,
        # etc - see master/buildbot/status/builder.py
        sa.Column('results', sa.SmallInteger),

        # time the buildrequest was created
        sa.Column('submitted_at', sa.Integer, nullable=False),

        # time the buildrequest was completed, or NULL
        sa.Column('complete_at', sa.Integer),

        # boolean indicating whether there is a step blocking, waiting for this
        # request to complete
        sa.Column('waited_for',
                  sa.SmallInteger,
                  server_default=sa.DefaultClause("0")),
    )

    # Each row in this table represents a claimed build request, where the
    # claim is made by the master referenced by masterid.
    buildrequest_claims = sautils.Table(
        'buildrequest_claims',
        metadata,
        sa.Column('brid',
                  sa.Integer,
                  sa.ForeignKey('buildrequests.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('masterid',
                  sa.Integer,
                  sa.ForeignKey('masters.id', ondelete='CASCADE'),
                  index=True,
                  nullable=False),
        sa.Column('claimed_at', sa.Integer, nullable=False),
    )

    # Tables related to builds
    # ------------------------

    # This table contains the build properties
    build_properties = sautils.Table(
        'build_properties',
        metadata,
        sa.Column('buildid',
                  sa.Integer,
                  sa.ForeignKey('builds.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('name', sa.String(256), nullable=False),
        # JSON encoded value
        sa.Column('value', sa.Text, nullable=False),
        sa.Column('source', sa.String(256), nullable=False),
    )

    # This table contains transient build state.
    build_data = sautils.Table(
        'build_data',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('buildid',
                  sa.Integer,
                  sa.ForeignKey('builds.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('name', sa.String(256), nullable=False),
        sa.Column('value',
                  sa.LargeBinary().with_variant(sa.dialects.mysql.LONGBLOB,
                                                "mysql"),
                  nullable=False),
        sa.Column('length', sa.Integer, nullable=False),
        sa.Column('source', sa.String(256), nullable=False),
    )

    # This table contains basic information about each build.
    builds = sautils.Table(
        'builds',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('number', sa.Integer, nullable=False),
        sa.Column('builderid',
                  sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),
        # note that there is 1:N relationship here.
        # In case of worker loss, build has results RETRY
        # and buildrequest is unclaimed.
        # We use use_alter to prevent circular reference
        # (buildrequests -> buildsets -> builds).
        sa.Column('buildrequestid',
                  sa.Integer,
                  sa.ForeignKey('buildrequests.id',
                                use_alter=True,
                                name='buildrequestid',
                                ondelete='CASCADE'),
                  nullable=False),
        # worker which performed this build
        # keep nullable to support worker-free builds
        sa.Column('workerid',
                  sa.Integer,
                  sa.ForeignKey('workers.id', ondelete='SET NULL'),
                  nullable=True),
        # master which controlled this build
        sa.Column('masterid',
                  sa.Integer,
                  sa.ForeignKey('masters.id', ondelete='CASCADE'),
                  nullable=False),
        # start/complete times
        sa.Column('started_at', sa.Integer, nullable=False),
        sa.Column('complete_at', sa.Integer),
        sa.Column('state_string', sa.Text, nullable=False),
        sa.Column('results', sa.Integer),
    )

    # Tables related to steps
    # -----------------------

    steps = sautils.Table(
        'steps',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('number', sa.Integer, nullable=False),
        sa.Column('name', sa.String(50), nullable=False),
        sa.Column('buildid',
                  sa.Integer,
                  sa.ForeignKey('builds.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('started_at', sa.Integer),
        sa.Column('complete_at', sa.Integer),
        sa.Column('state_string', sa.Text, nullable=False),
        sa.Column('results', sa.Integer),
        sa.Column('urls_json', sa.Text, nullable=False),
        sa.Column('hidden',
                  sa.SmallInteger,
                  nullable=False,
                  server_default='0'),
    )

    # Tables related to logs
    # ----------------------

    logs = sautils.Table(
        'logs',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, nullable=False),
        sa.Column('slug', sa.String(50), nullable=False),
        sa.Column('stepid',
                  sa.Integer,
                  sa.ForeignKey('steps.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False),
        sa.Column('num_lines', sa.Integer, nullable=False),
        # 's' = stdio, 't' = text, 'h' = html, 'd' = deleted
        sa.Column('type', sa.String(1), nullable=False),
    )

    logchunks = sautils.Table(
        'logchunks',
        metadata,
        sa.Column('logid',
                  sa.Integer,
                  sa.ForeignKey('logs.id', ondelete='CASCADE'),
                  nullable=False),
        # 0-based line number range in this chunk (inclusive); note that for
        # HTML logs, this counts lines of HTML, not lines of rendered output
        sa.Column('first_line', sa.Integer, nullable=False),
        sa.Column('last_line', sa.Integer, nullable=False),
        # log contents, including a terminating newline, encoded in utf-8 or,
        # if 'compressed' is not 0, compressed with gzip, bzip2 or lz4
        sa.Column('content', sa.LargeBinary(65536)),
        sa.Column('compressed', sa.SmallInteger, nullable=False),
    )

    # Tables related to buildsets
    # ---------------------------

    # This table contains input properties for buildsets
    buildset_properties = sautils.Table(
        'buildset_properties',
        metadata,
        sa.Column('buildsetid',
                  sa.Integer,
                  sa.ForeignKey('buildsets.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('property_name', sa.String(256), nullable=False),
        # JSON-encoded tuple of (value, source)
        sa.Column('property_value', sa.Text, nullable=False),
    )

    # This table represents Buildsets - sets of BuildRequests that share the
    # same original cause and source information.
    buildsets = sautils.Table(
        'buildsets',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),

        # a simple external identifier to track down this buildset later, e.g.,
        # for try requests
        sa.Column('external_idstring', sa.String(256)),

        # a short string giving the reason the buildset was created
        sa.Column('reason', sa.String(256)),
        sa.Column('submitted_at', sa.Integer, nullable=False),

        # if this is zero, then the build set is still pending
        sa.Column('complete',
                  sa.SmallInteger,
                  nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),

        # results is only valid when complete == 1; 0 = SUCCESS, 1 = WARNINGS,
        # etc - see master/buildbot/status/builder.py
        sa.Column('results', sa.SmallInteger),

        # optional parent build, we use use_alter to prevent circular reference
        # http://docs.sqlalchemy.org/en/latest/orm/relationships.html#rows-that-point-to-themselves-mutually-dependent-rows
        sa.Column('parent_buildid',
                  sa.Integer,
                  sa.ForeignKey('builds.id',
                                use_alter=True,
                                name='parent_buildid',
                                ondelete='SET NULL'),
                  nullable=True),
        # text describing what is the relationship with the build
        # could be 'triggered from', 'rebuilt from', 'inherited from'
        sa.Column('parent_relationship', sa.Text),
    )

    # Tables related to change sources
    # --------------------------------

    # The changesources table gives a unique identifier to each ChangeSource.  It
    # also links to other tables used to ensure only one master runs each
    # changesource
    changesources = sautils.Table(
        'changesources',
        metadata,
        sa.Column("id", sa.Integer, primary_key=True),

        # name for this changesource, as given in the configuration, plus a hash
        # of that name used for a unique index
        sa.Column('name', sa.Text, nullable=False),
        sa.Column('name_hash', sa.String(40), nullable=False),
    )

    # This links changesources to the master where they are running.  A changesource
    # linked to a master that is inactive can be unlinked by any master.  This
    # is a separate table so that we can "claim" changesources on a master by
    # inserting; this has better support in database servers for ensuring that
    # exactly one claim succeeds.
    changesource_masters = sautils.Table(
        'changesource_masters',
        metadata,
        sa.Column('changesourceid',
                  sa.Integer,
                  sa.ForeignKey('changesources.id', ondelete='CASCADE'),
                  nullable=False,
                  primary_key=True),
        sa.Column('masterid',
                  sa.Integer,
                  sa.ForeignKey('masters.id', ondelete='CASCADE'),
                  nullable=False),
    )

    # Tables related to workers
    # -------------------------

    workers = sautils.Table(
        "workers",
        metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("name", sa.String(50), nullable=False),
        sa.Column("info", JsonObject, nullable=False),
        sa.Column("paused",
                  sa.SmallInteger,
                  nullable=False,
                  server_default="0"),
        sa.Column("graceful",
                  sa.SmallInteger,
                  nullable=False,
                  server_default="0"),
    )

    # link workers to all builder/master pairs for which they are
    # configured
    configured_workers = sautils.Table(
        'configured_workers',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column('buildermasterid',
                  sa.Integer,
                  sa.ForeignKey('builder_masters.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('workerid',
                  sa.Integer,
                  sa.ForeignKey('workers.id', ondelete='CASCADE'),
                  nullable=False),
    )

    # link workers to the masters they are currently connected to
    connected_workers = sautils.Table(
        'connected_workers',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column('masterid',
                  sa.Integer,
                  sa.ForeignKey('masters.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('workerid',
                  sa.Integer,
                  sa.ForeignKey('workers.id', ondelete='CASCADE'),
                  nullable=False),
    )

    # Tables related to changes
    # ----------------------------

    # Files touched in changes
    change_files = sautils.Table(
        'change_files',
        metadata,
        sa.Column('changeid',
                  sa.Integer,
                  sa.ForeignKey('changes.changeid', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('filename', sa.String(1024), nullable=False),
    )

    # Properties for changes
    change_properties = sautils.Table(
        'change_properties',
        metadata,
        sa.Column('changeid',
                  sa.Integer,
                  sa.ForeignKey('changes.changeid', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('property_name', sa.String(256), nullable=False),
        # JSON-encoded tuple of (value, source)
        sa.Column('property_value', sa.Text, nullable=False),
    )

    # users associated with this change; this allows multiple users for
    # situations where a version-control system can represent both an author
    # and committer, for example.
    change_users = sautils.Table(
        "change_users",
        metadata,
        sa.Column('changeid',
                  sa.Integer,
                  sa.ForeignKey('changes.changeid', ondelete='CASCADE'),
                  nullable=False),
        # uid for the author of the change with the given changeid
        sa.Column('uid',
                  sa.Integer,
                  sa.ForeignKey('users.uid', ondelete='CASCADE'),
                  nullable=False),
    )

    # Changes to the source code, produced by ChangeSources
    changes = sautils.Table(
        'changes',
        metadata,
        # changeid also serves as 'change number'
        sa.Column('changeid', sa.Integer, primary_key=True),

        # author's name (usually an email address)
        sa.Column('author', sa.String(255), nullable=False),

        # committer's name
        sa.Column('committer', sa.String(255), nullable=True),

        # commit comment
        sa.Column('comments', sa.Text, nullable=False),

        # The branch where this change occurred.  When branch is NULL, that
        # means the main branch (trunk, master, etc.)
        sa.Column('branch', sa.String(255)),

        # revision identifier for this change
        sa.Column('revision', sa.String(255)),  # CVS uses NULL
        # link to the revision in a web-based repository browser, if any
        sa.Column('revlink', sa.String(256)),

        # this is the timestamp of the change - it is usually copied from the
        # version-control system, and may be long in the past or even in the
        # future!
        sa.Column('when_timestamp', sa.Integer, nullable=False),

        # an arbitrary string used for filtering changes
        sa.Column('category', sa.String(255)),

        # repository specifies, along with revision and branch, the
        # source tree in which this change was detected.
        sa.Column('repository',
                  sa.String(length=512),
                  nullable=False,
                  server_default=''),

        # codebase is a logical name to specify what is in the repository
        sa.Column('codebase',
                  sa.String(256),
                  nullable=False,
                  server_default=sa.DefaultClause("")),

        # project names the project this source code represents.  It is used
        # later to filter changes
        sa.Column('project',
                  sa.String(length=512),
                  nullable=False,
                  server_default=''),

        # the sourcestamp this change brought the codebase to
        sa.Column('sourcestampid',
                  sa.Integer,
                  sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'),
                  nullable=False),

        # The parent of the change.
        # Although a change currently has at most one parent, the plural name
        # is used so that the API will not need to change if changes ever gain
        # multiple parents.
        sa.Column('parent_changeids',
                  sa.Integer,
                  sa.ForeignKey('changes.changeid', ondelete='SET NULL'),
                  nullable=True),
    )

    # Tables related to sourcestamps
    # ------------------------------

    # Patches for SourceStamps that were generated through the try mechanism
    patches = sautils.Table(
        'patches',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),

        # number of directory levels to strip off (patch -pN)
        sa.Column('patchlevel', sa.Integer, nullable=False),

        # base64-encoded version of the patch file
        sa.Column('patch_base64', sa.Text, nullable=False),

        # patch author, if known
        sa.Column('patch_author', sa.Text, nullable=False),

        # patch comment
        sa.Column('patch_comment', sa.Text, nullable=False),

        # subdirectory in which the patch should be applied; NULL for top-level
        sa.Column('subdir', sa.Text),
    )

    # A sourcestamp identifies a particular instance of the source code.
    # Ideally, this would always be absolute, but in practice source stamps can
    # also mean "latest" (when revision is NULL), which is of course a
    # time-dependent definition.
    sourcestamps = sautils.Table(
        'sourcestamps',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),

        # hash of the branch, revision, patchid, repository, codebase, and
        # project, using hashColumns.
        sa.Column('ss_hash', sa.String(40), nullable=False),

        # the branch to check out.  When branch is NULL, that means
        # the main branch (trunk, master, etc.)
        sa.Column('branch', sa.String(256)),

        # the revision to check out, or the latest if NULL
        sa.Column('revision', sa.String(256)),

        # the patch to apply to generate this source code
        sa.Column('patchid',
                  sa.Integer,
                  sa.ForeignKey('patches.id', ondelete='CASCADE'),
                  nullable=True),

        # the repository from which this source should be checked out
        sa.Column('repository',
                  sa.String(length=512),
                  nullable=False,
                  server_default=''),

        # codebase is a logical name to specify what is in the repository
        sa.Column('codebase',
                  sa.String(256),
                  nullable=False,
                  server_default=sa.DefaultClause("")),

        # the project this source code represents
        sa.Column('project',
                  sa.String(length=512),
                  nullable=False,
                  server_default=''),

        # the time this sourcestamp was first seen (the first time it was added)
        sa.Column('created_at', sa.Integer, nullable=False),
    )

    # a many-to-many relationship between buildsets and sourcestamps
    buildset_sourcestamps = sautils.Table(
        'buildset_sourcestamps',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('buildsetid',
                  sa.Integer,
                  sa.ForeignKey('buildsets.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('sourcestampid',
                  sa.Integer,
                  sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'),
                  nullable=False),
    )

    # Tables related to schedulers
    # ----------------------------

    # The schedulers table gives a unique identifier to each scheduler.  It
    # also links to other tables used to ensure only one master runs each
    # scheduler, and to track changes that a scheduler may trigger a build for
    # later.
    schedulers = sautils.Table(
        'schedulers',
        metadata,
        sa.Column("id", sa.Integer, primary_key=True),

        # name for this scheduler, as given in the configuration, plus a hash
        # of that name used for a unique index
        sa.Column('name', sa.Text, nullable=False),
        sa.Column('name_hash', sa.String(40), nullable=False),
        # nonzero when the scheduler is enabled; enabled by default
        sa.Column('enabled',
                  sa.SmallInteger,
                  server_default=sa.DefaultClause("1")),
    )

    # This links schedulers to the master where they are running.  A scheduler
    # linked to a master that is inactive can be unlinked by any master.  This
    # is a separate table so that we can "claim" schedulers on a master by
    # inserting; this has better support in database servers for ensuring that
    # exactly one claim succeeds.  The ID column is present for external users;
    # see bug #1053.
    # NOTE(review): the comment above mentions an ID column, but this table
    # uses schedulerid as its primary key -- confirm the comment is current.
    scheduler_masters = sautils.Table(
        'scheduler_masters',
        metadata,
        sa.Column('schedulerid',
                  sa.Integer,
                  sa.ForeignKey('schedulers.id', ondelete='CASCADE'),
                  nullable=False,
                  primary_key=True),
        sa.Column('masterid',
                  sa.Integer,
                  sa.ForeignKey('masters.id', ondelete='CASCADE'),
                  nullable=False),
    )

    # This table references "classified" changes that have not yet been
    # "processed".  That is, the scheduler has looked at these changes and
    # determined that something should be done, but that hasn't happened yet.
    # Rows are deleted from this table as soon as the scheduler is done with
    # the change.
    scheduler_changes = sautils.Table(
        'scheduler_changes',
        metadata,
        sa.Column('schedulerid',
                  sa.Integer,
                  sa.ForeignKey('schedulers.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('changeid',
                  sa.Integer,
                  sa.ForeignKey('changes.changeid', ondelete='CASCADE'),
                  nullable=False),
        # true (nonzero) if this change is important to this scheduler
        sa.Column('important', sa.Integer),
    )

    # Tables related to builders
    # --------------------------

    builders = sautils.Table(
        'builders',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # builder's name
        sa.Column('name', sa.Text, nullable=False),
        # builder's description
        sa.Column('description', sa.Text, nullable=True),
        # sha1 of name; used for a unique index
        sa.Column('name_hash', sa.String(40), nullable=False),
    )

    # This links builders to the master where they are running.  A builder
    # linked to a master that is inactive can be unlinked by any master.  Note
    # that builders can run on multiple masters at the same time.
    builder_masters = sautils.Table(
        'builder_masters',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column('builderid',
                  sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('masterid',
                  sa.Integer,
                  sa.ForeignKey('masters.id', ondelete='CASCADE'),
                  nullable=False),
    )

    # Tables related to tags
    # ----------------------

    tags = sautils.Table(
        'tags',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # tag's name
        sa.Column('name', sa.Text, nullable=False),
        # sha1 of name; used for a unique index
        sa.Column('name_hash', sa.String(40), nullable=False),
    )

    # a many-to-many relationship between builders and tags
    builders_tags = sautils.Table(
        'builders_tags',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('builderid',
                  sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('tagid',
                  sa.Integer,
                  sa.ForeignKey('tags.id', ondelete='CASCADE'),
                  nullable=False),
    )

    # Tables related to test results
    # ------------------------------

    # Represents a single test result set. A step can have any number of test
    # result sets, each of which may contain any number of test results.
    test_result_sets = sautils.Table(
        'test_result_sets',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),

        # In the future we will want to rearrange the underlying data in the database according
        # to (builderid, buildid) tuple, so that huge number of entries in the table does not
        # reduce the efficiency of retrieval of data for a particular build.
        sa.Column('builderid',
                  sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('buildid',
                  sa.Integer,
                  sa.ForeignKey('builds.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('stepid',
                  sa.Integer,
                  sa.ForeignKey('steps.id', ondelete='CASCADE'),
                  nullable=False),

        # The free-form description of the source of the test data that represent the test result
        # set.
        sa.Column('description', sa.Text, nullable=True),
        sa.Column('category', sa.Text, nullable=False),
        sa.Column('value_unit', sa.Text, nullable=False),

        # The number of passed tests in cases when the pass or fail criteria depends only on how
        # that single test runs.
        sa.Column('tests_passed', sa.Integer, nullable=True),

        # The number of failed tests in cases when the pass or fail criteria depends only on how
        # that single test runs.
        sa.Column('tests_failed', sa.Integer, nullable=True),

        # true when all test results associated with test result set have been generated.
        sa.Column('complete', sa.SmallInteger, nullable=False),
    )

    # Represents a test result. A single test result set will represent thousands of test results
    # in any significant codebase that's tested.
    #
    # A common table is used for all tests results regardless of what data they carry. Most serious
    # database engines will be able to optimize nullable fields out, so extra columns are almost
    # free when not used in such cases.
    test_results = sautils.Table(
        'test_results',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),

        # The builder ID of the test result set that the test result belongs to.
        # This is included for future partitioning support.
        sa.Column('builderid',
                  sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('test_result_setid',
                  sa.Integer,
                  sa.ForeignKey('test_result_sets.id', ondelete='CASCADE'),
                  nullable=False),
        # normalized references to the test name and code path; nullable
        # because not every result carries them
        sa.Column('test_nameid',
                  sa.Integer,
                  sa.ForeignKey('test_names.id', ondelete='CASCADE'),
                  nullable=True),
        sa.Column('test_code_pathid',
                  sa.Integer,
                  sa.ForeignKey('test_code_paths.id', ondelete='CASCADE'),
                  nullable=True),

        # The code line that the test originated from
        sa.Column('line', sa.Integer, nullable=True),

        # The duration of the test execution itself
        sa.Column('duration_ns', sa.Integer, nullable=True),

        # The result of the test converted to a string.
        sa.Column('value', sa.Text, nullable=False),
    )

    # Represents the test names of test results.
    test_names = sautils.Table(
        'test_names',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),

        # The builder ID of the test result set that the test result belongs to.
        # This is included for future partitioning support and also for querying all test names
        # for a builder.
        sa.Column('builderid',
                  sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('name', sa.Text, nullable=False),
    )

    # Represents the file paths of test results.
    test_code_paths = sautils.Table(
        'test_code_paths',
        metadata,
        sa.Column('id', sa.Integer, primary_key=True),

        # The builder ID of the test result set that the test result belongs to.
        # This is included for future partitioning support
        sa.Column('builderid',
                  sa.Integer,
                  sa.ForeignKey('builders.id', ondelete='CASCADE'),
                  nullable=False),
        sa.Column('path', sa.Text, nullable=False),
    )

    # Tables related to objects
    # -------------------------

    # This table uniquely identifies objects that need to maintain state across
    # invocations.
    objects = sautils.Table(
        "objects",
        metadata,
        # unique ID for this object
        sa.Column("id", sa.Integer, primary_key=True),
        # object's user-given name
        sa.Column('name', sa.String(128), nullable=False),
        # object's class name, basically representing a "type" for the state
        sa.Column('class_name', sa.String(128), nullable=False),
    )

    # This table stores key/value pairs for objects, where the key is a string
    # and the value is a JSON string.
    object_state = sautils.Table(
        "object_state",
        metadata,
        # object for which this value is set
        sa.Column('objectid',
                  sa.Integer,
                  sa.ForeignKey('objects.id', ondelete='CASCADE'),
                  nullable=False),
        # name for this value (local to the object)
        sa.Column("name", sa.String(length=255), nullable=False),
        # value, as a JSON string
        sa.Column("value_json", sa.Text, nullable=False),
    )

    # Tables related to users
    # -----------------------

    # This table identifies individual users, and contains buildbot-specific
    # information about those users.
    users = sautils.Table(
        "users",
        metadata,
        # unique user id number
        sa.Column("uid", sa.Integer, primary_key=True),

        # identifier (nickname) for this user; used for display
        sa.Column("identifier", sa.String(255), nullable=False),

        # username portion of user credentials for authentication
        sa.Column("bb_username", sa.String(128)),

        # password portion of user credentials for authentication
        sa.Column("bb_password", sa.String(128)),
    )

    # This table stores information identifying a user that's related to a
    # particular interface - a version-control system, status plugin, etc.
    users_info = sautils.Table(
        "users_info",
        metadata,
        # unique user id number
        sa.Column('uid',
                  sa.Integer,
                  sa.ForeignKey('users.uid', ondelete='CASCADE'),
                  nullable=False),

        # type of user attribute, such as 'git'
        sa.Column("attr_type", sa.String(128), nullable=False),

        # data for given user attribute, such as a commit string or password
        sa.Column("attr_data", sa.String(128), nullable=False),
    )

    # Tables related to masters
    # -------------------------

    masters = sautils.Table(
        "masters",
        metadata,
        # unique id per master
        sa.Column('id', sa.Integer, primary_key=True),

        # master's name (generally in the form hostname:basedir)
        sa.Column('name', sa.Text, nullable=False),
        # sha1 of name; used for a unique index
        sa.Column('name_hash', sa.String(40), nullable=False),

        # true if this master is running
        sa.Column('active', sa.Integer, nullable=False),

        # updated periodically by a running master, so silently failed masters
        # can be detected by other masters
        sa.Column('last_active', sa.Integer, nullable=False),
    )

    # Indexes
    # -------
    # Plain indexes speed up common lookups; unique=True indexes also enforce
    # uniqueness constraints (e.g. name hashes, identity pairs).

    sa.Index('buildrequests_buildsetid', buildrequests.c.buildsetid)
    sa.Index('buildrequests_builderid', buildrequests.c.builderid)
    sa.Index('buildrequests_complete', buildrequests.c.complete)
    sa.Index('build_properties_buildid', build_properties.c.buildid)
    sa.Index('build_data_buildid_name',
             build_data.c.buildid,
             build_data.c.name,
             unique=True)
    sa.Index('builds_buildrequestid', builds.c.buildrequestid)
    sa.Index('buildsets_complete', buildsets.c.complete)
    sa.Index('buildsets_submitted_at', buildsets.c.submitted_at)
    sa.Index('buildset_properties_buildsetid',
             buildset_properties.c.buildsetid)
    sa.Index('workers_name', workers.c.name, unique=True)
    sa.Index('changes_branch', changes.c.branch)
    sa.Index('changes_revision', changes.c.revision)
    sa.Index('changes_author', changes.c.author)
    sa.Index('changes_category', changes.c.category)
    sa.Index('changes_when_timestamp', changes.c.when_timestamp)
    sa.Index('change_files_changeid', change_files.c.changeid)
    sa.Index('change_properties_changeid', change_properties.c.changeid)
    sa.Index('changes_sourcestampid', changes.c.sourcestampid)
    sa.Index('changesource_name_hash', changesources.c.name_hash, unique=True)
    sa.Index('scheduler_name_hash', schedulers.c.name_hash, unique=True)
    sa.Index('scheduler_changes_schedulerid', scheduler_changes.c.schedulerid)
    sa.Index('scheduler_changes_changeid', scheduler_changes.c.changeid)
    sa.Index('scheduler_changes_unique',
             scheduler_changes.c.schedulerid,
             scheduler_changes.c.changeid,
             unique=True)
    sa.Index('builder_name_hash', builders.c.name_hash, unique=True)
    sa.Index('builder_masters_builderid', builder_masters.c.builderid)
    sa.Index('builder_masters_masterid', builder_masters.c.masterid)
    sa.Index('builder_masters_identity',
             builder_masters.c.builderid,
             builder_masters.c.masterid,
             unique=True)
    sa.Index('tag_name_hash', tags.c.name_hash, unique=True)
    sa.Index('builders_tags_builderid', builders_tags.c.builderid)
    sa.Index('builders_tags_unique',
             builders_tags.c.builderid,
             builders_tags.c.tagid,
             unique=True)
    sa.Index('configured_workers_buildmasterid',
             configured_workers.c.buildermasterid)
    sa.Index('configured_workers_workers', configured_workers.c.workerid)
    sa.Index('configured_workers_identity',
             configured_workers.c.buildermasterid,
             configured_workers.c.workerid,
             unique=True)
    sa.Index('connected_workers_masterid', connected_workers.c.masterid)
    sa.Index('connected_workers_workers', connected_workers.c.workerid)
    sa.Index('connected_workers_identity',
             connected_workers.c.masterid,
             connected_workers.c.workerid,
             unique=True)
    sa.Index('users_identifier', users.c.identifier, unique=True)
    sa.Index('users_info_uid', users_info.c.uid)
    sa.Index('users_info_uid_attr_type',
             users_info.c.uid,
             users_info.c.attr_type,
             unique=True)
    sa.Index('users_info_attrs',
             users_info.c.attr_type,
             users_info.c.attr_data,
             unique=True)
    sa.Index('change_users_changeid', change_users.c.changeid)
    sa.Index('users_bb_user', users.c.bb_username, unique=True)
    sa.Index('object_identity',
             objects.c.name,
             objects.c.class_name,
             unique=True)
    sa.Index('name_per_object',
             object_state.c.objectid,
             object_state.c.name,
             unique=True)
    sa.Index('master_name_hashes', masters.c.name_hash, unique=True)
    sa.Index('buildrequest_claims_brids',
             buildrequest_claims.c.brid,
             unique=True)
    sa.Index('sourcestamps_ss_hash_key', sourcestamps.c.ss_hash, unique=True)
    sa.Index('buildset_sourcestamps_buildsetid',
             buildset_sourcestamps.c.buildsetid)
    sa.Index('buildset_sourcestamps_unique',
             buildset_sourcestamps.c.buildsetid,
             buildset_sourcestamps.c.sourcestampid,
             unique=True)
    sa.Index('builds_number', builds.c.builderid, builds.c.number, unique=True)
    sa.Index('builds_workerid', builds.c.workerid)
    sa.Index('builds_masterid', builds.c.masterid)
    sa.Index('steps_number', steps.c.buildid, steps.c.number, unique=True)
    sa.Index('steps_name', steps.c.buildid, steps.c.name, unique=True)
    sa.Index('steps_started_at', steps.c.started_at)
    sa.Index('logs_slug', logs.c.stepid, logs.c.slug, unique=True)
    sa.Index('logchunks_firstline', logchunks.c.logid, logchunks.c.first_line)
    sa.Index('logchunks_lastline', logchunks.c.logid, logchunks.c.last_line)
    # mysql_length caps the indexed prefix of TEXT columns, which MySQL
    # requires for indexing such columns
    sa.Index('test_names_name',
             test_names.c.builderid,
             test_names.c.name,
             mysql_length={'name': 255})
    sa.Index('test_code_paths_path',
             test_code_paths.c.builderid,
             test_code_paths.c.path,
             mysql_length={'path': 255})

    # MySQL creates indexes for foreign keys, and these appear in the
    # reflection.  This is a list of (table, index) names that should be
    # expected on this platform.  Each entry is a (table_name, index_info)
    # pair, where index_info matches SQLAlchemy's reflection format.
    # All entries use dict literals for consistency (the list previously
    # mixed dict(...) calls and literals).

    implied_indexes = [
        ('change_users', {
            'name': 'uid',
            'column_names': ['uid'],
            'unique': False,
        }),
        ('sourcestamps', {
            'name': 'patchid',
            'column_names': ['patchid'],
            'unique': False,
        }),
        ('scheduler_masters', {
            'name': 'masterid',
            'column_names': ['masterid'],
            'unique': False,
        }),
        ('changesource_masters', {
            'name': 'masterid',
            'column_names': ['masterid'],
            'unique': False,
        }),
        ('buildset_sourcestamps', {
            'name': 'sourcestampid',
            'column_names': ['sourcestampid'],
            'unique': False,
        }),
        ('buildsets', {
            'name': 'parent_buildid',
            'column_names': ['parent_buildid'],
            'unique': False,
        }),
        ('builders_tags', {
            'name': 'tagid',
            'column_names': ['tagid'],
            'unique': False,
        }),
        ('changes', {
            'name': 'parent_changeids',
            'column_names': ['parent_changeids'],
            'unique': False,
        }),
        ('test_result_sets', {
            'name': 'builderid',
            'column_names': ['builderid'],
            'unique': False,
        }),
        ('test_result_sets', {
            'name': 'buildid',
            'column_names': ['buildid'],
            'unique': False,
        }),
        ('test_result_sets', {
            'name': 'stepid',
            'column_names': ['stepid'],
            'unique': False,
        }),
        ('test_results', {
            'name': 'test_result_setid',
            'column_names': ['test_result_setid'],
            'unique': False,
        }),
        ('test_results', {
            'name': 'test_code_pathid',
            'column_names': ['test_code_pathid'],
            'unique': False,
        }),
        ('test_results', {
            'name': 'builderid',
            'column_names': ['builderid'],
            'unique': False,
        }),
        ('test_results', {
            'name': 'test_nameid',
            'column_names': ['test_nameid'],
            'unique': False,
        }),
    ]

    # Migration support
    # -----------------

    # this is a bit more complicated than might be expected because the first
    # seven database versions were once implemented using a homespun migration
    # system, and we need to support upgrading masters from that system.  The
    # old system used a 'version' table, where SQLAlchemy-Migrate uses
    # 'migrate_version'

    # filesystem path of the SQLAlchemy-Migrate repository, next to this module
    repo_path = util.sibpath(__file__, "migrate")

    @defer.inlineCallbacks
    def is_current(self):
        """Return (via Deferred) True when the database schema matches the
        latest migration version in the repository, False otherwise."""
        if ControlledSchema is None:
            # this should have been caught earlier by enginestrategy.py with a
            # nicer error message
            raise ImportError("SQLAlchemy/SQLAlchemy-Migrate version conflict")

        def check(engine):
            # A database without a migrate_version table is by definition not
            # current, so the legacy 'version' table never needs consulting.
            repository = migrate.versioning.repository.Repository(self.repo_path)
            latest = repository.latest
            try:
                # migrate.api doesn't let us hand in an engine
                controlled = ControlledSchema(engine, self.repo_path)
                current = controlled.version
            except exceptions.DatabaseNotControlledError:
                return False
            return current == latest

        result = yield self.db.pool.do_with_engine(check)
        return result

    # returns a Deferred that returns None
    def create(self):
        """Create all model tables directly, bypassing migration.

        Nice and simple, but used only by tests.
        """
        def run_create(engine):
            self.metadata.create_all(bind=engine)

        return self.db.pool.do_with_engine(run_create)

    @defer.inlineCallbacks
    def upgrade(self):
        """Upgrade the database schema to the latest version (Deferred)."""

        # here, things are a little tricky.  If we have a 'version' table, then
        # we need to version_control the database with the proper version
        # number, drop 'version', and then upgrade.  If we have no 'version'
        # table and no 'migrate_version' table, then we need to version_control
        # the database.  Otherwise, we just need to upgrade it.

        def table_exists(engine, tbl):
            # Probe for a table by selecting one row from it; any exception
            # is treated as "the table does not exist".
            try:
                r = engine.execute("select * from {} limit 1".format(tbl))
                r.close()
                return True
            except Exception:
                return False

        # http://code.google.com/p/sqlalchemy-migrate/issues/detail?id=100
        # means  we cannot use the migrate.versioning.api module.  So these
        # methods perform similar wrapping functions to what is done by the API
        # functions, but without disposing of the engine.
        # NOTE(review): this nested function shadows the enclosing 'upgrade'
        # method name within thd() below; only the nested one is reachable
        # there.
        def upgrade(engine):
            schema = ControlledSchema(engine, self.repo_path)
            changeset = schema.changeset(None)
            # SQLite foreign-key enforcement is suspended while migration
            # scripts rewrite tables
            with sautils.withoutSqliteForeignKeys(engine):
                for version, change in changeset:
                    log.msg('migrating schema version {} -> {}'.format(
                        version, version + 1))
                    schema.runchange(version, change, 1)

        def check_sqlalchemy_migrate_version():
            # sqlalchemy-migrate started including a version number in 0.7; we
            # support back to 0.6.1, but not 0.6.  We'll use some discovered
            # differences between 0.6.1 and 0.6 to get that resolution.
            version = getattr(migrate, '__version__', 'old')
            if version == 'old':
                try:
                    from migrate.versioning import schemadiff
                    if hasattr(schemadiff, 'ColDiff'):
                        version = "0.6.1"
                    else:
                        version = "0.6"
                except Exception:
                    version = "0.0"
            # strip any '-dev' style suffix before parsing the version tuple
            version_tup = tuple(map(int, version.split('-', 1)[0].split('.')))
            log.msg("using SQLAlchemy-Migrate version {}".format(version))
            if version_tup < (0, 6, 1):
                raise RuntimeError(
                    ("You are using SQLAlchemy-Migrate {}. "
                     "The minimum version is 0.6.1.").format(version))

        def version_control(engine, version=None):
            # stamp the database as controlled by migrate at 'version'
            ControlledSchema.create(engine, self.repo_path, version)

        # the upgrade process must run in a db thread
        def thd(engine):
            # if the migrate_version table exists, we can just let migrate
            # take care of this process.
            if table_exists(engine, 'migrate_version'):
                r = engine.execute(
                    "select version from migrate_version limit 1")
                old_version = r.scalar()
                if old_version < 40:
                    raise EightUpgradeError()
                try:
                    upgrade(engine)
                except sa.exc.NoSuchTableError as e:  # pragma: no cover
                    # 'migration_tmp' is the scratch table used by migration
                    # scripts; its absence mid-upgrade means a previous run
                    # was interrupted
                    if 'migration_tmp' in str(e):
                        log.err(
                            'A serious error has been encountered during the upgrade. The '
                            'previous upgrade has been likely interrupted. The database has '
                            'been damaged and automatic recovery is impossible.'
                        )
                        log.err(
                            'If you believe this is an error, please submit a bug to the '
                            'Buildbot project.')
                    raise

            # if the version table exists, then we can version_control things
            # at that version, drop the version table, and let migrate take
            # care of the rest.
            elif table_exists(engine, 'version'):
                raise EightUpgradeError()

            # otherwise, this db is new, so we don't bother using the migration engine
            # and just create the tables, and put the version directly to
            # latest
            else:
                # do some tests before getting started
                test_unicode(engine)

                log.msg("Initializing empty database")
                Model.metadata.create_all(engine)
                repo = migrate.versioning.repository.Repository(self.repo_path)

                version_control(engine, repo.latest)

        check_sqlalchemy_migrate_version()
        yield self.db.pool.do_with_engine(thd)
예제 #26
0
 def append_index(self, cls, column):
     """Declare a PostgreSQL GIN index named ``ix_<table>_<column>`` on *column*.

     ``cls`` is accepted for interface compatibility; it is not used here.
     """
     index_name = '_'.join(('ix', column.table.name, column.name))
     sa.Index(index_name, column, postgresql_using='gin')
예제 #27
0
파일: 071_mitaka.py 프로젝트: zzjeric/heat
def upgrade(migrate_engine):
    """Create the full Heat (Mitaka-era) schema on *migrate_engine*.

    Tables are declared against a fresh MetaData bound to the engine and
    then created one by one in dependency order (parents before children,
    since later tables carry foreign keys into earlier ones).  If any
    CREATE fails, every table created so far is dropped so the database is
    left in its pre-upgrade state, and the original exception is re-raised.
    """
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    # Template bodies plus their files/environment payloads.
    raw_template = sqlalchemy.Table(
        'raw_template', meta,
        sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True,
                          nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('template', types.LongText),
        sqlalchemy.Column('files', types.Json),
        sqlalchemy.Column('environment', types.Json),

        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Stored credentials used to act on behalf of stack owners.
    user_creds = sqlalchemy.Table(
        'user_creds', meta,
        sqlalchemy.Column('id', sqlalchemy.Integer,
                          primary_key=True, nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('username', sqlalchemy.String(255)),
        sqlalchemy.Column('password', sqlalchemy.String(255)),
        sqlalchemy.Column('region_name', sqlalchemy.String(length=255)),
        sqlalchemy.Column('decrypt_method', sqlalchemy.String(length=64)),
        sqlalchemy.Column('tenant', sqlalchemy.String(1024)),
        sqlalchemy.Column('auth_url', sqlalchemy.Text),
        sqlalchemy.Column('tenant_id', sqlalchemy.String(256)),
        sqlalchemy.Column('trust_id', sqlalchemy.String(255)),
        sqlalchemy.Column('trustor_user_id', sqlalchemy.String(64)),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # The stack itself; references raw_template (current and previous)
    # and user_creds.
    stack = sqlalchemy.Table(
        'stack', meta,
        sqlalchemy.Column('id', sqlalchemy.String(36),
                          primary_key=True, nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('deleted_at', sqlalchemy.DateTime),
        sqlalchemy.Column('name', sqlalchemy.String(255)),
        sqlalchemy.Column('raw_template_id',
                          sqlalchemy.Integer,
                          sqlalchemy.ForeignKey('raw_template.id'),
                          nullable=False),
        sqlalchemy.Column('prev_raw_template_id',
                          sqlalchemy.Integer,
                          sqlalchemy.ForeignKey('raw_template.id')),
        sqlalchemy.Column('user_creds_id', sqlalchemy.Integer,
                          sqlalchemy.ForeignKey('user_creds.id')),
        sqlalchemy.Column('username', sqlalchemy.String(256)),
        sqlalchemy.Column('owner_id', sqlalchemy.String(36)),
        sqlalchemy.Column('action', sqlalchemy.String(255)),
        sqlalchemy.Column('status', sqlalchemy.String(255)),
        sqlalchemy.Column('status_reason', types.LongText),
        sqlalchemy.Column('timeout', sqlalchemy.Integer),
        sqlalchemy.Column('tenant', sqlalchemy.String(256)),
        sqlalchemy.Column('disable_rollback', sqlalchemy.Boolean,
                          nullable=False),
        sqlalchemy.Column('stack_user_project_id',
                          sqlalchemy.String(length=64)),
        sqlalchemy.Column('backup', sqlalchemy.Boolean, default=False),
        sqlalchemy.Column('nested_depth', sqlalchemy.Integer, default=0),
        sqlalchemy.Column('convergence', sqlalchemy.Boolean, default=False),
        sqlalchemy.Column('current_traversal', sqlalchemy.String(36)),
        sqlalchemy.Column('current_deps', types.Json),
        sqlalchemy.Column('parent_resource_name', sqlalchemy.String(255)),
        # mysql_length caps the indexed prefix so these fit MySQL's
        # index-size limits with utf8 columns.
        sqlalchemy.Index('ix_stack_name', 'name', mysql_length=255),
        sqlalchemy.Index('ix_stack_tenant', 'tenant', mysql_length=255),
        sqlalchemy.Index('ix_stack_owner_id', 'owner_id', mysql_length=36),

        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Individual resources belonging to a stack.
    resource = sqlalchemy.Table(
        'resource', meta,
        sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True,
                          nullable=False),
        sqlalchemy.Column('uuid', sqlalchemy.String(36), unique=True,
                          default=lambda: str(uuid.uuid4())),
        sqlalchemy.Column('nova_instance', sqlalchemy.String(255)),
        sqlalchemy.Column('name', sqlalchemy.String(255)),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('action', sqlalchemy.String(255)),
        sqlalchemy.Column('status', sqlalchemy.String(255)),
        sqlalchemy.Column('status_reason', types.LongText),
        sqlalchemy.Column('stack_id', sqlalchemy.String(36),
                          sqlalchemy.ForeignKey('stack.id'), nullable=False),
        sqlalchemy.Column('rsrc_metadata', types.LongText),
        sqlalchemy.Column('properties_data', types.Json),
        sqlalchemy.Column('engine_id', sqlalchemy.String(length=36)),
        sqlalchemy.Column('atomic_key', sqlalchemy.Integer),
        sqlalchemy.Column('needed_by', types.List),
        sqlalchemy.Column('requires', types.List),
        sqlalchemy.Column('replaces', sqlalchemy.Integer),
        sqlalchemy.Column('replaced_by', sqlalchemy.Integer),
        sqlalchemy.Column('current_template_id', sqlalchemy.Integer,
                          sqlalchemy.ForeignKey('raw_template.id')),
        sqlalchemy.Column('properties_data_encrypted',
                          sqlalchemy.Boolean,
                          default=False),
        sqlalchemy.Column('root_stack_id', sqlalchemy.String(36)),
        sqlalchemy.Index('ix_resource_root_stack_id',
                         'root_stack_id',
                         mysql_length=36),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Arbitrary key/value data attached to a resource.
    resource_data = sqlalchemy.Table(
        'resource_data', meta,
        sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True,
                          nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('key', sqlalchemy.String(255)),
        sqlalchemy.Column('value', sqlalchemy.Text),
        sqlalchemy.Column('redact', sqlalchemy.Boolean),
        sqlalchemy.Column('decrypt_method', sqlalchemy.String(length=64)),
        sqlalchemy.Column('resource_id',
                          sqlalchemy.Integer,
                          sqlalchemy.ForeignKey('resource.id'),
                          nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Per-stack event log.
    event = sqlalchemy.Table(
        'event', meta,
        sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True,
                          nullable=False),
        sqlalchemy.Column('uuid', sqlalchemy.String(36),
                          default=lambda: str(uuid.uuid4()), unique=True),
        sqlalchemy.Column('stack_id', sqlalchemy.String(36),
                          sqlalchemy.ForeignKey('stack.id'), nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('resource_action', sqlalchemy.String(255)),
        sqlalchemy.Column('resource_status', sqlalchemy.String(255)),
        sqlalchemy.Column('resource_name', sqlalchemy.String(255)),
        sqlalchemy.Column('physical_resource_id', sqlalchemy.String(255)),
        sqlalchemy.Column('resource_status_reason', sqlalchemy.String(255)),
        sqlalchemy.Column('resource_type', sqlalchemy.String(255)),
        sqlalchemy.Column('resource_properties', sqlalchemy.PickleType),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Alarm/watch rules and their collected data points.
    watch_rule = sqlalchemy.Table(
        'watch_rule', meta,
        sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True,
                          nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('name', sqlalchemy.String(255)),
        sqlalchemy.Column('state', sqlalchemy.String(255)),
        sqlalchemy.Column('rule', types.LongText),
        sqlalchemy.Column('last_evaluated', sqlalchemy.DateTime),
        sqlalchemy.Column('stack_id', sqlalchemy.String(36),
                          sqlalchemy.ForeignKey('stack.id'), nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    watch_data = sqlalchemy.Table(
        'watch_data', meta,
        sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True,
                          nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('data', types.LongText),
        sqlalchemy.Column('watch_rule_id', sqlalchemy.Integer,
                          sqlalchemy.ForeignKey('watch_rule.id'),
                          nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Advisory lock: which engine currently owns a stack.
    stack_lock = sqlalchemy.Table(
        'stack_lock', meta,
        sqlalchemy.Column('stack_id', sqlalchemy.String(length=36),
                          sqlalchemy.ForeignKey('stack.id'),
                          primary_key=True,
                          nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('engine_id', sqlalchemy.String(length=36)),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Software config documents and their deployments to servers.
    software_config = sqlalchemy.Table(
        'software_config', meta,
        sqlalchemy.Column('id', sqlalchemy.String(36),
                          primary_key=True,
                          nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('name', sqlalchemy.String(255)),
        sqlalchemy.Column('group', sqlalchemy.String(255)),
        sqlalchemy.Column('config', types.LongText),
        sqlalchemy.Column('tenant', sqlalchemy.String(64),
                          nullable=False,
                          index=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    software_deployment = sqlalchemy.Table(
        'software_deployment', meta,
        sqlalchemy.Column('id', sqlalchemy.String(36),
                          primary_key=True,
                          nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime,
                          index=True),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('server_id', sqlalchemy.String(36),
                          nullable=False,
                          index=True),
        sqlalchemy.Column('config_id',
                          sqlalchemy.String(36),
                          sqlalchemy.ForeignKey('software_config.id'),
                          nullable=False),
        sqlalchemy.Column('input_values', types.Json),
        sqlalchemy.Column('output_values', types.Json),
        sqlalchemy.Column('action', sqlalchemy.String(255)),
        sqlalchemy.Column('status', sqlalchemy.String(255)),
        sqlalchemy.Column('status_reason', types.LongText),
        sqlalchemy.Column('tenant', sqlalchemy.String(64),
                          nullable=False,
                          index=True),
        sqlalchemy.Column('stack_user_project_id',
                          sqlalchemy.String(length=64)),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Point-in-time snapshots of a stack.
    snapshot = sqlalchemy.Table(
        'snapshot', meta,
        sqlalchemy.Column('id', sqlalchemy.String(36),
                          primary_key=True,
                          nullable=False),
        sqlalchemy.Column('stack_id',
                          sqlalchemy.String(36),
                          sqlalchemy.ForeignKey('stack.id'),
                          nullable=False),
        sqlalchemy.Column('name', sqlalchemy.String(255)),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('status', sqlalchemy.String(255)),
        sqlalchemy.Column('status_reason', sqlalchemy.String(255)),
        sqlalchemy.Column('data', types.Json),
        sqlalchemy.Column('tenant', sqlalchemy.String(64),
                          nullable=False,
                          index=True),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Heartbeat records for running heat-engine services.
    service = sqlalchemy.Table(
        'service', meta,
        sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True,
                          default=lambda: str(uuid.uuid4())),
        sqlalchemy.Column('engine_id', sqlalchemy.String(36), nullable=False),
        sqlalchemy.Column('host', sqlalchemy.String(255), nullable=False),
        sqlalchemy.Column('hostname', sqlalchemy.String(255), nullable=False),
        sqlalchemy.Column('binary', sqlalchemy.String(255), nullable=False),
        sqlalchemy.Column('topic', sqlalchemy.String(255), nullable=False),
        sqlalchemy.Column('report_interval', sqlalchemy.Integer,
                          nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('deleted_at', sqlalchemy.DateTime),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    stack_tag = sqlalchemy.Table(
        'stack_tag', meta,
        sqlalchemy.Column('id',
                          sqlalchemy.Integer,
                          primary_key=True,
                          nullable=False),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
        sqlalchemy.Column('tag', sqlalchemy.Unicode(80)),
        sqlalchemy.Column('stack_id',
                          sqlalchemy.String(36),
                          sqlalchemy.ForeignKey('stack.id'),
                          nullable=False),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Convergence sync points; composite primary key, no surrogate id.
    sync_point = sqlalchemy.Table(
        'sync_point', meta,
        sqlalchemy.Column('entity_id', sqlalchemy.String(36)),
        sqlalchemy.Column('traversal_id', sqlalchemy.String(36)),
        sqlalchemy.Column('is_update', sqlalchemy.Boolean),
        sqlalchemy.Column('atomic_key', sqlalchemy.Integer,
                          nullable=False),
        sqlalchemy.Column('stack_id', sqlalchemy.String(36),
                          nullable=False),
        sqlalchemy.Column('input_data', types.Json),
        sqlalchemy.Column('created_at', sqlalchemy.DateTime),
        sqlalchemy.Column('updated_at', sqlalchemy.DateTime),

        sqlalchemy.PrimaryKeyConstraint('entity_id',
                                        'traversal_id',
                                        'is_update'),
        sqlalchemy.ForeignKeyConstraint(['stack_id'], ['stack.id'],
                                        name='fk_stack_id'),

        mysql_engine='InnoDB',
        mysql_charset='utf8'
    )

    # Creation order matters: foreign-key targets must exist first.
    tables = (
        raw_template,
        user_creds,
        stack,
        resource,
        resource_data,
        event,
        watch_rule,
        watch_data,
        stack_lock,
        software_config,
        software_deployment,
        snapshot,
        service,
        stack_tag,
        sync_point,
    )

    for index, table in enumerate(tables):
        try:
            table.create()
        except Exception:
            # If an error occurs, drop all tables created so far to return
            # to the previously existing state.
            meta.drop_all(tables=tables[:index])
            raise
# Example #28
# File: models.py  Project: alanquillin/quark
                                backref='ports',
                                order_by='IPAddress.allocated_at')

    @declarative.declared_attr
    def security_groups(cls):
        """Many-to-many relationship to SecurityGroup via the
        port/group association table, with a ``ports`` backref on
        SecurityGroup."""
        assoc = port_group_association_table
        return orm.relationship(
            SecurityGroup,
            primaryjoin=(cls.id == assoc.c.port_id),
            secondaryjoin=(assoc.c.group_id == SecurityGroup.id),
            secondary=assoc,
            backref="ports")


# Indices tailored specifically to get_instance_nw_info calls from nova
sa.Index("idx_ports_1", Port.__table__.c.device_id, Port.__table__.c.tenant_id)
sa.Index("idx_ports_2", Port.__table__.c.device_owner,
         Port.__table__.c.network_id)
sa.Index("idx_ports_3", Port.__table__.c.tenant_id)


class MacAddress(BASEV2, models.HasTenant):
    """A MAC address allocated from a MAC address range."""

    __tablename__ = "quark_mac_addresses"
    # The address itself, stored as an integer, is the primary key.
    address = sa.Column(sa.BigInteger(), primary_key=True)
    mac_address_range_id = sa.Column(
        sa.String(36),
        sa.ForeignKey("quark_mac_address_ranges.id", ondelete="CASCADE"),
        nullable=False)
    # Deallocation markers allow reuse of released addresses.
    deallocated = sa.Column(sa.Boolean(), index=True)
    deallocated_at = sa.Column(sa.DateTime(), index=True)
    # NOTE(review): this relationship is not assigned to a class attribute,
    # so declarative mapping discards it — the Port<->mac_address backref is
    # likely never created.  Confirm intent; probably should be
    # ``ports = orm.relationship(...)``.
    orm.relationship(Port, backref="mac_address")
# Example #29
# File: sql.py  Project: somu-analyst/blaze
def create_index(s, column, name=None, unique=False):
    """Create an index on *column* of table *s*.

    :param s: a SQLAlchemy Table bound to an engine (``s.bind``)
    :param column: name of the column to index (looked up on ``s.c``)
    :param name: required index name
    :param unique: whether the index enforces uniqueness
    :raises ValueError: if *name* is not given
    """
    if name is None:
        raise ValueError('SQL indexes must have a name')
    target = getattr(s.c, column)
    index = sa.Index(name, target, unique=unique)
    index.create(s.bind)
# Example #30
                ret.append(a)
        return ret

    def add_note(self, short, long=None):
        """Create a Note, attach it to this object, and persist both.

        Sets ``cur_note`` to the new note's short text, commits the
        session, and returns the Note instance.
        """
        note = Note()
        note.short = short
        note.long = long
        self.notes.append(note)
        self.cur_note = note.short
        # Persist the note and the updated parent in one commit.
        Session.add(note)
        Session.add(self)
        Session.commit()
        return note


# Enforce at most one alert row per (addr, uptime) pair.
sa.Index('addr_uptime', Alert.addr, Alert.uptime, unique=True)


class Host(Base):
    """A monitored host and its associated alerts."""

    __tablename__ = 'hosts'

    id = sa.Column(sa.types.Integer, primary_key=True)
    # Network address and display name; both required.
    addr = sa.Column(sa.types.String(255), nullable=False)
    name = sa.Column(sa.types.String(255), nullable=False)
    # Hosts default to active when created.
    active = sa.Column(sa.types.Boolean, nullable=False, default=True)

    # Newest-first alerts; lazy='dynamic' yields a filterable query
    # instead of loading the collection eagerly.
    alerts = orm.relation('Alert',
                          backref='host',
                          lazy='dynamic',
                          order_by=sa.desc(Alert.time))