def upgrade():
    """Alembic migration: add a nullable pickled 'initial' column to 'modes'."""
    initial_column = sa.Column('initial', sa.PickleType(), nullable=True)
    op.add_column('modes', initial_column)
def upgrade():
    """Alembic migration: create the initial CTF schema.

    Tables created: challenges, config, teams, users, solves, plus the
    lookup indexes Alembic generated for them.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'challenges',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('author', sa.Unicode(length=32), nullable=True),
        sa.Column('name', sa.String(length=32), nullable=True),
        sa.Column('title', sa.Unicode(length=64), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('hint', sa.Text(), nullable=True),
        sa.Column('category', sa.Unicode(length=64), nullable=True),
        sa.Column('value', sa.Integer(), nullable=True),
        sa.Column('grader', sa.UnicodeText(), nullable=True),
        sa.Column('autogen', sa.Boolean(), nullable=True),
        sa.Column('programming', sa.Boolean(), nullable=True),
        sa.Column('threshold', sa.Integer(), nullable=True),
        # NOTE(review): PickleType stores arbitrary pickled Python objects;
        # only trusted application data should ever be written here.
        sa.Column('weightmap', sa.PickleType(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'))
    op.create_index(op.f('ix_challenges_id'), 'challenges', ['id'], unique=False)
    op.create_table(
        'config',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('key', sa.Unicode(length=32), nullable=True),
        sa.Column('value', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_config_key'), 'config', ['key'], unique=False)
    op.create_table(
        'teams',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('teamname', sa.Unicode(length=32), nullable=True),
        sa.Column('affiliation', sa.Unicode(length=48), nullable=True),
        sa.Column('captain', sa.Integer(), nullable=True),
        sa.Column('banned', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('teamname'))
    op.create_index(op.f('ix_teams_id'), 'teams', ['id'], unique=False)
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('tid', sa.Integer(), nullable=True),  # team id (FK below)
        sa.Column('name', sa.Unicode(length=32), nullable=True),
        sa.Column('username', sa.String(length=16), nullable=True),
        sa.Column('email', sa.String(length=128), nullable=True),
        sa.Column('password', sa.String(length=128), nullable=True),
        sa.Column('admin', sa.Boolean(), nullable=True),
        sa.Column('level', sa.Integer(), nullable=True),
        sa.Column('register_time', sa.DateTime(), nullable=True),
        sa.Column('reset_token', sa.String(length=32), nullable=True),
        sa.Column('otp_secret', sa.String(length=16), nullable=True),
        sa.Column('otp_confirmed', sa.Boolean(), nullable=True),
        sa.Column('email_token', sa.String(length=32), nullable=True),
        sa.Column('email_verified', sa.Boolean(), nullable=True),
        sa.Column('avatar', sa.String(length=128), nullable=True),
        sa.ForeignKeyConstraint(['tid'], ['teams.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'))
    op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
    op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
    op.create_table(
        'solves',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('pid', sa.Integer(), nullable=True),  # challenge id (FK below)
        sa.Column('tid', sa.Integer(), nullable=True),  # team id (FK below)
        sa.Column('uid', sa.Integer(), nullable=True),  # user id (FK below)
        sa.Column('date', sa.DateTime(), nullable=True),
        sa.Column('correct', sa.Boolean(), nullable=True),
        sa.Column('flag', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['pid'], ['challenges.id'], ),
        sa.ForeignKeyConstraint(['tid'], ['teams.id'], ),
        sa.ForeignKeyConstraint(['uid'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_solves_id'), 'solves', ['id'], unique=False)
    op.create_index(op.f('ix_solves_pid'), 'solves', ['pid'], unique=False)
    op.create_index(op.f('ix_solves_tid'), 'solves', ['tid'], unique=False)
    op.create_index(op.f('ix_solves_uid'), 'solves', ['uid'], unique=False)
def upgrade():
    """Alembic migration: add a nullable pickled 'conf' column to 'dag_run'."""
    conf_column = sa.Column('conf', sa.PickleType(), nullable=True)
    op.add_column('dag_run', conf_column)
def upgrade():
    """Alembic migration: add a nullable pickled 'last_played_clips' column to 'loop'."""
    clips_column = sa.Column('last_played_clips', sa.PickleType(), nullable=True)
    op.add_column('loop', clips_column)
def upgrade():
    """Alembic migration: create the initial set-builder schema.

    Tables created: set, user, custom_set, custom_set_exos, custom_set_stats,
    item, item_conditions, item_stats.  All primary keys are server-generated
    UUIDs via uuid_generate_v4().

    NOTE(review): server_default uuid_generate_v4() presumably relies on the
    PostgreSQL uuid-ossp extension being installed -- confirm before running.
    NOTE(review): PK columns use postgresql.UUID(as_uuid=True) while FK columns
    use plain postgresql.UUID() -- looks inconsistent; verify intent.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "set",
        sa.Column(
            "uuid",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("bonuses", sa.PickleType(), nullable=True),
        sa.PrimaryKeyConstraint("uuid"),
    )
    op.create_table(
        "user",
        sa.Column(
            "uuid",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("username", sa.String(), nullable=False),
        sa.Column("email", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("uuid"),
    )
    op.create_table(
        "custom_set",
        sa.Column(
            "uuid",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("name", sa.String(), nullable=True),
        sa.Column("description", sa.String(), nullable=True),
        sa.Column("owner_id", postgresql.UUID(), nullable=True),
        sa.Column("creation_date", sa.DateTime(), nullable=True),
        sa.Column("level", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["owner_id"], ["user.uuid"],),
        sa.PrimaryKeyConstraint("uuid"),
    )
    op.create_table(
        "custom_set_exos",
        sa.Column(
            "uuid",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("stat", sa.String(), nullable=True),
        sa.Column("value", sa.Integer(), nullable=True),
        sa.Column("custom_set_id", postgresql.UUID(), nullable=True),
        sa.ForeignKeyConstraint(["custom_set_id"], ["custom_set.uuid"],),
        sa.PrimaryKeyConstraint("uuid"),
    )
    op.create_table(
        "custom_set_stats",
        sa.Column(
            "uuid",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        # Column names are intentionally camelCase (they are runtime strings
        # the application queries by).
        sa.Column("scrolledVitality", sa.Integer(), nullable=True),
        sa.Column("scrolledWisdom", sa.Integer(), nullable=True),
        sa.Column("scrolledStrength", sa.Integer(), nullable=True),
        sa.Column("scrolledIntelligence", sa.Integer(), nullable=True),
        sa.Column("scrolledChance", sa.Integer(), nullable=True),
        sa.Column("scrolledAgility", sa.Integer(), nullable=True),
        sa.Column("baseVitality", sa.Integer(), nullable=True),
        sa.Column("baseWisdom", sa.Integer(), nullable=True),
        sa.Column("baseStrength", sa.Integer(), nullable=True),
        sa.Column("baseIntelligence", sa.Integer(), nullable=True),
        sa.Column("baseChance", sa.Integer(), nullable=True),
        sa.Column("baseAgility", sa.Integer(), nullable=True),
        sa.Column("custom_set_id", postgresql.UUID(), nullable=True),
        sa.ForeignKeyConstraint(["custom_set_id"], ["custom_set.uuid"],),
        sa.PrimaryKeyConstraint("uuid"),
    )
    op.create_table(
        "item",
        sa.Column(
            "uuid",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("itemType", sa.String(), nullable=False),
        sa.Column("set_id", postgresql.UUID(), nullable=True),
        sa.Column("level", sa.Integer(), nullable=False),
        sa.Column("imageUrl", sa.String(), nullable=True),
        sa.Column("custom_set_id", postgresql.UUID(), nullable=True),
        sa.ForeignKeyConstraint(["custom_set_id"], ["custom_set.uuid"],),
        sa.ForeignKeyConstraint(["set_id"], ["set.uuid"],),
        sa.PrimaryKeyConstraint("uuid"),
    )
    op.create_table(
        "item_conditions",
        sa.Column(
            "uuid",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("item_id", postgresql.UUID(), nullable=True),
        sa.Column("stat_type", sa.String(), nullable=True),
        sa.Column("conditionType", sa.String(), nullable=True),
        sa.Column("limit", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["item_id"], ["item.uuid"],),
        sa.PrimaryKeyConstraint("uuid"),
    )
    op.create_table(
        "item_stats",
        sa.Column(
            "uuid",
            postgresql.UUID(as_uuid=True),
            server_default=sa.text("uuid_generate_v4()"),
            nullable=False,
        ),
        sa.Column("item_id", postgresql.UUID(), nullable=True),
        sa.Column("stat", sa.String(), nullable=True),
        sa.Column("minValue", sa.Integer(), nullable=True),
        sa.Column("maxValue", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(["item_id"], ["item.uuid"],),
        sa.PrimaryKeyConstraint("uuid"),
    )
class User(db.Model):
    """A peer in the network, persisted in the 'user' table.

    Holds peering state (pending/queued/peered/online), a pickled ``info``
    dict (JSON-pickled, mutation-tracked via MutableDict) and a unique,
    de-duplicated ``nickname``.  The row whose id equals settings.USER_ID
    represents the local user.
    """
    __tablename__ = 'user'

    created = sa.Column(sa.DateTime())
    modified = sa.Column(sa.DateTime())
    id = sa.Column(sa.String(43), primary_key=True)
    # Free-form metadata (nickname, username, 'local' node info, ...);
    # MutableDict makes in-place mutations mark the column dirty.
    info = sa.Column(
        MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))
    nickname = sa.Column(sa.String(256), unique=True)
    pending = sa.Column(sa.String(64))  # sent|received
    queued = sa.Column(sa.Boolean())
    peered = sa.Column(sa.Boolean())
    online = sa.Column(sa.Boolean())

    def __repr__(self):
        return self.id

    @classmethod
    def get(cls, id):
        """Return the user with this id, or None."""
        return cls.query.filter_by(id=id).first()

    @classmethod
    def get_or_create(cls, id):
        """Return the user with this id, creating (and saving) it if missing.

        A newly created user starts unpeered/offline with an empty info dict;
        if the id is known to the local node registry, its node info and
        username are copied in and the nickname is derived from them.
        """
        user = cls.get(id)
        if not user:
            user = cls(id=id, peered=False, online=False)
            user.info = {}
            if state.nodes and state.nodes._local and id in state.nodes._local._nodes:
                user.info['local'] = state.nodes._local._nodes[id]
                user.info['username'] = user.info['local']['username']
                user.update_name()
            user.save()
        return user

    def save(self):
        """Persist this row (add + commit on the global session)."""
        state.db.session.add(self)
        state.db.session.commit()

    @property
    def name(self):
        # The local user (id == settings.USER_ID) has no display name here.
        name = self.nickname if self.id != settings.USER_ID else ''
        return name

    @property
    def library(self):
        """This user's library list (created on demand, pinned at index -1)."""
        l = List.get_or_create(self.id, '')
        if l.index_ != -1:
            l.index_ = -1
            l.save()
        return l

    def json(self):
        """Serializable dict view of this user (info merged with state flags)."""
        j = {}
        if self.info:
            j.update(self.info)
        j['id'] = self.id
        if self.pending:
            j['pending'] = self.pending
        j['peered'] = self.peered
        j['online'] = self.is_online()
        j['nickname'] = self.info.get('nickname')
        # For the local user the username comes from preferences, not info.
        j['username'] = self.info.get(
            'username'
        ) if self.id != settings.USER_ID else settings.preferences['username']
        j['name'] = self.name
        return j

    def is_online(self):
        return state.nodes and state.nodes.is_online(self.id)

    def lists_json(self):
        # Accessing self.library for its side effect: ensures the library
        # list exists and is pinned before serializing all lists.
        self.library
        return [l.json() for l in self.lists.order_by('index_')]

    def clear_list_cache(self):
        """Drop this user's entries from settings.list_cache (':' prefix = local)."""
        if self.id == settings.USER_ID:
            prefix = ':'
        else:
            prefix = self.id + ':'
        for key in list(settings.list_cache):
            if key.startswith(prefix):
                del settings.list_cache[key]

    def clear_smart_list_cache(self):
        """Drop cached entries for all smart lists from settings.list_cache."""
        qs = List.query.filter_by(type='smart')
        smart_lists = [':%d' % l.id for l in qs]
        for key in list(settings.list_cache):
            if key in smart_lists:
                del settings.list_cache[key]

    def update_peering(self, peered, username=None):
        """Transition this user's peering state.

        peered=True: accept/confirm peering (records an 'addpeer' changelog
        entry on the first transition).  peered=False: tear peering down --
        deletes the user's lists, item links, changelog entries and cached
        UI state, recording 'removepeer' if we were peered before.
        """
        was_peering = self.peered
        if peered:
            logging.debug('update_peering, pending: %s queued: %s',
                          self.pending, self.queued)
            self.queued = self.pending != 'sent'
            self.pending = ''
            if username:
                self.info['username'] = username
            self.update_name()
            # FIXME: need to set peered to False to not trigger changelog event
            # before other side receives acceptPeering request
            self.peered = False
            self.save()
            if not was_peering:
                Changelog.record(state.user(), 'addpeer', self.id, self.nickname)
            self.peered = True
            self.save()
        else:
            self.pending = ''
            self.peered = False
            self.queued = False
            self.update_name()
            self.save()
            List.query.filter_by(user_id=self.id).delete()
            # Unlink this user from shared items; delete items nobody holds.
            for i in self.items:
                i.users.remove(self)
                if not i.users:
                    i.delete()
            Changelog.query.filter_by(user_id=self.id).delete()
            if self.id in settings.ui['showFolder']:
                del settings.ui['showFolder'][self.id]
            self.clear_list_cache()
            self.save()
            if was_peering:
                Changelog.record(state.user(), 'removepeer', self.id)
            self.save()

    def update_name(self):
        """Derive a unique nickname, appending ' [n]' on collisions."""
        if self.id == settings.USER_ID:
            name = settings.preferences.get('username', 'anonymous')
        else:
            name = self.info.get('nickname') or self.info.get(
                'username') or 'anonymous'
        nickname = name
        n = 2
        while self.query.filter_by(nickname=nickname).filter(
                User.id != self.id).first():
            nickname = '%s [%d]' % (name, n)
            n += 1
        self.nickname = nickname

    def migrate_id(self, service_id):
        """Rewrite this user's id to service_id across all referencing tables.

        Only applies when service_id is a 16-char id (the migrated format).

        NOTE(review): SQL is built with str.format rather than bound
        parameters -- acceptable only because ids are internally generated;
        never pass externally-controlled values here.
        """
        if len(service_id) == 16:
            statements = [
                "DELETE FROM user WHERE id = '{nid}'",
                "UPDATE user SET id = '{nid}' WHERE id = '{oid}'",
                "UPDATE list SET user_id = '{nid}' WHERE user_id = '{oid}'",
                "UPDATE useritem SET user_id = '{nid}' WHERE user_id = '{oid}'",
                "UPDATE changelog SET user_id = '{nid}' WHERE user_id = '{oid}'",
            ]
            with db.session() as session:
                for sql in statements:
                    session.connection().execute(
                        sql.format(oid=self.id, nid=service_id))
                session.commit()
class SqliteDatabase(Logger):
    """Threaded request-queue front end over a single SQLite table.

    Reads, writes and updates are queued (READING_REQUESTS / WRITING_REQUESTS /
    UPDATE_REQUESTS) and drained by a background ``_process_requests`` thread,
    serializing access to the database.

    NOTE(review): the queues are plain list/dict shared across threads with no
    lock; correctness presumably relies on the GIL and the single processor
    thread -- verify under concurrent callers.
    """

    # Maps the caller-supplied attribute type names to SQLAlchemy column types.
    SQLITE_COLUMNS = {'float': sql.Float(),
                      'bool': sql.Boolean(),
                      'integer': sql.Integer(),
                      'pickle': sql.PickleType(),
                      'string': sql.String(512),}

    def __init__(self, path, attributes, name = 'table', verbosity = 0):
        """Create/open <path>/search_progress.db and ensure the table exists.

        attributes: dict mapping column name -> type key in SQLITE_COLUMNS.
        """
        self.WRITING_REQUESTS = []
        self.READING_REQUESTS = {}
        self.UPDATE_REQUESTS = []
        Logger.__init__(self, 'SQLite interface', verbosity = verbosity)
        self.db_path = 'sqlite:///%s/search_progress.db' % path
        self.attributes = attributes
        self.name = name
        self.log('creating database %s at %s' % (self.name, self.db_path), 'DEBUG')
        # create database
        self.db = sql.create_engine(self.db_path)
        self.db.echo = False
        self.metadata = sql.MetaData(self.db)
        # create table in database
        self.table = sql.Table(self.name, self.metadata)
        for name, att_type in self.attributes.items():
            self.table.append_column(sql.Column(name, self.SQLITE_COLUMNS[att_type]))
        self.table.create(checkfirst = True)
        # start request processor
        self._process_requests()

    #====================================================================
    # Decorator: convert raw result rows into plain dicts keyed by the
    # configured attribute names.
    def _return_dict(function):
        def wrapper(self, *args, **kwargs):
            entries = function(self, *args, **kwargs)
            info_dicts = [{key: entry[key] for key in self.attributes} for entry in entries]
            return info_dicts
        return wrapper

    #====================================================================
    @thread
    def _process_requests(self):
        """Background loop: drain queued requests until all queues are empty.

        Per iteration: execute at most one unexecuted read, all pending
        updates, at most one write, then discard reads whose results were
        fetched by the caller.
        """
        self._processing_requests = True
        keep_processing = True
        iteration_index = 0
        while keep_processing:
            num_reading_requests = len(self.READING_REQUESTS)
            num_writing_requests = len(self.WRITING_REQUESTS)
            num_update_requests = len(self.UPDATE_REQUESTS)
            iteration_index += 1
            # run at most one reading request
            request_keys = copy.deepcopy(list(self.READING_REQUESTS.keys()))
            for request_key in request_keys:
                if not self.READING_REQUESTS[request_key].executed:
                    self.READING_REQUESTS[request_key].execute()
                    break
            # run all update requests
            for update_index in range(num_update_requests):
                update_request = self.UPDATE_REQUESTS.pop()
                update_request.execute()
            # run at most one writing request
            if num_writing_requests > 0:
                writing_request = self.WRITING_REQUESTS.pop()
                writing_request.execute()
            # clean reading requests
            request_keys = copy.deepcopy(list(self.READING_REQUESTS.keys()))
            delete_keys = []
            for request_key in request_keys:
                if self.READING_REQUESTS[request_key].entries_fetched:
                    delete_keys.append(request_key)
            for request_key in delete_keys:
                del self.READING_REQUESTS[request_key]
            keep_processing = len(self.WRITING_REQUESTS) > 0 or len(self.UPDATE_REQUESTS) > 0 or len(self.READING_REQUESTS) > 0
        self._processing_requests = False

    #====================================================================
    def add(self, info_dict):
        """Queue an insert of info_dict; restart the processor if idle."""
        add_entry = AddEntry(self.db, self.table, info_dict)
        self.WRITING_REQUESTS.append(add_entry)
        if not self._processing_requests:
            self._process_requests()

    @_return_dict
    def fetch_all(self, condition_dict):
        """Queue a SELECT matching condition_dict and block for the rows.

        A list value means "any of these" (OR over the listed values); an
        empty list matches nothing and short-circuits to [].  Returns the
        rows as dicts (via _return_dict).
        """
        condition_keys = list(condition_dict.keys())
        condition_values = list(condition_dict.values())
        # define the selection
        selection = sql.select([self.table])
        for index, key in enumerate(condition_keys):
            if isinstance(condition_values[index], list):
                # with a list, we need to combine all possibilities with _or
                if len(condition_values[index]) == 0:
                    return []
                filters = [getattr(self.table.c, key) == value for value in condition_values[index]]
                condition = sql.or_(*filters)
            else:
                condition = getattr(self.table.c, key) == condition_values[index]
            selection = selection.where(condition)
        fetch_entries = FetchEntries(self.db, self.table, selection)
        fetch_keys = str(uuid.uuid4())
        self.READING_REQUESTS[fetch_keys] = fetch_entries
        if not self._processing_requests:
            self._process_requests()
        # get_entries blocks until the processor thread has executed the read.
        entries = fetch_entries.get_entries()
        self.log('fetched all information from database %s' % self.name, 'DEBUG')
        return entries

    def update_all(self, condition_dict, update_dict):
        """Queue an UPDATE setting update_dict on all rows matching condition_dict."""
        condition_keys = list(condition_dict.keys())
        condition_values = list(condition_dict.values())
        # defining the selection
        update = sql.update(self.table).values(update_dict).where(getattr(self.table.c, condition_keys[0]) == condition_values[0])
        for index, key in enumerate(condition_keys[1:]):
            update = update.where(getattr(self.table.c, key) == condition_values[index + 1])
        # submitting the update
        update_entries = UpdateEntries(self.db, self.table, update)
        self.WRITING_REQUESTS.append(update_entries)
        if not self._processing_requests:
            self._process_requests()
def upgrade():
    """Alembic migration: add nullable 'upcoming_shows' (array of pickled values) to 'Venue'."""
    shows_column = sa.Column('upcoming_shows', sa.ARRAY(sa.PickleType()), nullable=True)
    op.add_column('Venue', shows_column)
def upgrade():
    """Alembic migration: add a nullable pickled 'testing' column to 'job'."""
    testing_column = sa.Column('testing', sa.PickleType(), nullable=True)
    op.add_column('job', testing_column)
def upgrade():
    """Alembic migration: create the deployments/workers/resources/tasks/
    verifications schema plus result tables and their indexes.

    The self-referencing deployments FK is declared inline (with use_alter)
    on SQLite, and added after table creation on every other dialect.
    """
    dialect = api.get_engine().dialect
    deployments_columns = [
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("uuid", sa.String(length=36), nullable=False),
        sa.Column("parent_uuid", sa.String(length=36), nullable=True),
        sa.Column("name", sa.String(length=255), nullable=True),
        sa.Column("started_at", sa.DateTime(), nullable=True),
        sa.Column("completed_at", sa.DateTime(), nullable=True),
        sa.Column("config", sa_types.MutableJSONEncodedDict(), nullable=False),
        sa.Column("admin", sa.PickleType(), nullable=True),
        sa.Column("users", sa.PickleType(), nullable=False),
        # NOTE(review): column is named 'enum_deployments_status' while the
        # Enum type itself is 'enum_deploy_status' -- looks odd; confirm.
        sa.Column("enum_deployments_status",
                  sa.Enum("cleanup->failed", "cleanup->finished",
                          "cleanup->started", "deploy->failed",
                          "deploy->finished", "deploy->inconsistent",
                          "deploy->init", "deploy->started",
                          "deploy->subdeploy",
                          name="enum_deploy_status"),
                  nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name")
    ]
    if dialect.name.startswith("sqlite"):
        deployments_columns.append(
            sa.ForeignKeyConstraint(["parent_uuid"], [u"deployments.uuid"],
                                    name="fk_parent_uuid", use_alter=True))
    # commands auto generated by Alembic - please adjust!
    op.create_table("deployments", *deployments_columns)
    op.create_index("deployment_parent_uuid", "deployments",
                    ["parent_uuid"], unique=False)
    op.create_index("deployment_uuid", "deployments", ["uuid"], unique=True)
    if not dialect.name.startswith("sqlite"):
        op.create_foreign_key("fk_parent_uuid", "deployments", "deployments",
                              ["parent_uuid"], ["uuid"])
    op.create_table(
        "workers",
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("hostname", sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("hostname", name="uniq_worker@hostname"))
    op.create_table(
        "resources",
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("provider_name", sa.String(length=255), nullable=True),
        sa.Column("type", sa.String(length=255), nullable=True),
        sa.Column("info", sa_types.MutableJSONEncodedDict(), nullable=False),
        sa.Column("deployment_uuid", sa.String(length=36), nullable=False),
        sa.ForeignKeyConstraint(["deployment_uuid"], [u"deployments.uuid"]),
        sa.PrimaryKeyConstraint("id"))
    op.create_index("resource_deployment_uuid", "resources",
                    ["deployment_uuid"], unique=False)
    op.create_index("resource_provider_name", "resources",
                    ["deployment_uuid", "provider_name"], unique=False)
    op.create_index("resource_provider_name_and_type", "resources",
                    ["deployment_uuid", "provider_name", "type"], unique=False)
    op.create_index("resource_type", "resources",
                    ["deployment_uuid", "type"], unique=False)
    op.create_table(
        "tasks",
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("uuid", sa.String(length=36), nullable=False),
        sa.Column("status",
                  sa.Enum("aborted", "aborting", "cleaning up", "failed",
                          "finished", "init", "paused", "running",
                          "setting up", "soft_aborting", "verifying",
                          name="enum_tasks_status"),
                  nullable=False),
        sa.Column("verification_log", sa.Text(), nullable=True),
        sa.Column("tag", sa.String(length=64), nullable=True),
        sa.Column("deployment_uuid", sa.String(length=36), nullable=False),
        sa.ForeignKeyConstraint(["deployment_uuid"], [u"deployments.uuid"], ),
        sa.PrimaryKeyConstraint("id"))
    op.create_index("task_deployment", "tasks", ["deployment_uuid"],
                    unique=False)
    op.create_index("task_status", "tasks", ["status"], unique=False)
    op.create_index("task_uuid", "tasks", ["uuid"], unique=True)
    op.create_table(
        "verifications",
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("uuid", sa.String(length=36), nullable=False),
        sa.Column("deployment_uuid", sa.String(length=36), nullable=False),
        # Reuses the same enum type name as tasks.status.
        sa.Column("status",
                  sa.Enum("aborted", "aborting", "cleaning up", "failed",
                          "finished", "init", "paused", "running",
                          "setting up", "soft_aborting", "verifying",
                          name="enum_tasks_status"),
                  nullable=False),
        sa.Column("set_name", sa.String(length=20), nullable=True),
        sa.Column("tests", sa.Integer(), nullable=True),
        sa.Column("errors", sa.Integer(), nullable=True),
        sa.Column("failures", sa.Integer(), nullable=True),
        sa.Column("time", sa.Float(), nullable=True),
        sa.ForeignKeyConstraint(["deployment_uuid"], [u"deployments.uuid"], ),
        sa.PrimaryKeyConstraint("id"))
    op.create_index("verification_uuid", "verifications", ["uuid"],
                    unique=True)
    op.create_table(
        "task_results",
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("key", sa_types.MutableJSONEncodedDict(), nullable=False),
        sa.Column("data", sa_types.MutableJSONEncodedDict(), nullable=False),
        sa.Column("task_uuid", sa.String(length=36), nullable=True),
        sa.ForeignKeyConstraint(["task_uuid"], ["tasks.uuid"], ),
        sa.PrimaryKeyConstraint("id"))
    op.create_table(
        "verification_results",
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("verification_uuid", sa.String(length=36), nullable=True),
        sa.Column("data", sa_types.MutableJSONEncodedDict(), nullable=False),
        sa.ForeignKeyConstraint(["verification_uuid"],
                                ["verifications.uuid"]),
        sa.PrimaryKeyConstraint("id"))
def upgrade():
    """Alembic migration: add a pickled 'insert_headers' column to 'listener'."""
    headers_column = sa.Column('insert_headers', sa.PickleType())
    op.add_column('listener', headers_column)
def upgrade():
    """Alembic migration: add a nullable pickled 'preferred_names' column to 'team'."""
    names_column = sa.Column('preferred_names', sa.PickleType(), nullable=True)
    op.add_column('team', names_column)
def upgrade():
    """Alembic migration: create the bookmarking schema.

    Tables created: role, User, roles_users, Bookmarks, tag, tab, Archive.
    Also installs the PostgreSQL full-text search triggers for the two
    TSVECTOR columns on Bookmarks via sync_trigger.
    """
    ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    op.create_table(
        'role',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=80), nullable=True),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'))
    op.create_table(
        'User',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('email', sa.String(length=255), nullable=True),
        sa.Column('password', sa.String(length=255), nullable=True),
        sa.Column('first_name', sa.String(length=255), nullable=True),
        sa.Column('last_name', sa.String(length=255), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=True),
        sa.Column('confirmed_at', sa.DateTime(), nullable=True),
        sa.Column('bookmarks_per_page', sa.Integer(), nullable=True),
        sa.Column('api_key', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('api_key'),
        sa.UniqueConstraint('email'))
    # Association table User <-> role (no primary key of its own).
    op.create_table(
        'roles_users',
        sa.Column('User_id', sa.Integer(), nullable=True),
        sa.Column('role_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['User_id'], ['User.id'], ),
        sa.ForeignKeyConstraint(['role_id'], ['role.id'], ))
    op.create_table(
        'Bookmarks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=1024), nullable=True),
        sa.Column('description', sa.String(length=256), nullable=True),
        sa.Column('main_url', sa.String(length=2000), nullable=True),
        sa.Column('added_on', sa.DateTime(), nullable=True),
        sa.Column('user', sa.Integer(), nullable=True),
        sa.Column('deleted', sa.Boolean(), nullable=True),
        sa.Column('search_vector', TSVECTOR),
        sa.Column('readability_html', sa.Text(), nullable=True),
        sa.Column('tags', postgresql.ARRAY(sa.String()), nullable=True),
        sa.Column('full_text', sa.Text(), nullable=True),
        sa.Column('fulltext_vector', TSVECTOR),
        sa.ForeignKeyConstraint(['user'], ['User.id'], ),
        sa.PrimaryKeyConstraint('id'))
    # Keep the TSVECTOR columns in sync with their source columns.
    sync_trigger(conn, 'Bookmarks', 'fulltext_vector', ['full_text'])
    sync_trigger(conn, 'Bookmarks', 'search_vector', ['title', 'description'])
    op.create_table(
        'tag',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('text', sa.String(length=255), nullable=True),
        sa.Column('user', sa.Integer(), nullable=True),
        sa.Column('count', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['user'], ['User.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'tab',
        sa.Column('id', sa.String(length=50), nullable=False),
        sa.Column('tabs', sa.PickleType(), nullable=True),
        sa.Column('added_on', sa.DateTime(), nullable=True),
        sa.Column('user', sa.Integer(), nullable=True),
        sa.Column('title', sa.String(length=255), nullable=True),
        sa.ForeignKeyConstraint(['user'], ['User.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'Archive',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('web_page', sa.Integer(), nullable=True),
        sa.Column('service', sa.String(length=2048), nullable=True),
        sa.Column('archived_on', sa.DateTime(), nullable=True),
        sa.Column('archive_url', sa.String(length=2000), nullable=True),
        sa.Column('status', sa.String(length=2000), nullable=True),
        sa.ForeignKeyConstraint(['web_page'], ['Bookmarks.id'], ),
        sa.PrimaryKeyConstraint('id'))
def upgrade():
    """Alembic migration: create the location/group/submission schema.

    Tables created: coordinates, users, coordinates_pairs, groups, sessions,
    submissions, areas, submissions_calculated, users_groups_assoc, actions,
    submissions_calculated_pairs_assoc.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('coordinates',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('latitude', sa.Float(), nullable=True),
        sa.Column('longitude', sa.Float(), nullable=True),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=40), nullable=False),
        sa.Column('full_name', sa.String(length=128), nullable=True),
        sa.Column('password', sa.String(length=200), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('username')
    )
    # Travel metrics between two coordinates rows.
    op.create_table('coordinates_pairs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('origin_id', sa.Integer(), nullable=True),
        sa.Column('target_id', sa.Integer(), nullable=True),
        sa.Column('distance', sa.Float(), nullable=True),
        sa.Column('time_transit', sa.Float(), nullable=True),
        sa.Column('time_on_foot', sa.Float(), nullable=True),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('calculated', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['origin_id'], ['coordinates.id'], ),
        sa.ForeignKeyConstraint(['target_id'], ['coordinates.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('groups',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=120), nullable=True),
        sa.Column('owner_id', sa.Integer(), nullable=True),
        sa.Column('status', sa.String(length=40), nullable=True),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['owner_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('sessions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('expires', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('submissions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('category', sa.String(length=40), nullable=True),
        sa.Column('origin', sa.String(length=40), nullable=True),
        sa.Column('title', sa.String(length=255), nullable=True),
        sa.Column('url', sa.String(length=512), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('price', sa.Integer(), nullable=True),
        sa.Column('source_latitude', sa.Float(), nullable=True),
        sa.Column('source_longitude', sa.Float(), nullable=True),
        sa.Column('coordinates_id', sa.Integer(), nullable=True),
        sa.Column('is_scraped', sa.Boolean(), nullable=True),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['coordinates_id'], ['coordinates.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('areas',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('group_id', sa.Integer(), nullable=True),
        sa.Column('type', sa.String(length=40), nullable=True),
        sa.Column('center', sa.Integer(), nullable=True),
        sa.Column('radius', sa.Integer(), nullable=True),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['center'], ['coordinates.id'], ),
        sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('submissions_calculated',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('submission_id', sa.Integer(), nullable=True),
        sa.Column('group_id', sa.Integer(), nullable=True),
        sa.Column('rating', sa.Float(), nullable=True),
        sa.Column('parameters', sa.PickleType(), nullable=True),
        sa.Column('status', sa.String(length=40), nullable=True),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ),
        sa.ForeignKeyConstraint(['submission_id'], ['submissions.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Many-to-many: users <-> groups (composite PK).
    op.create_table('users_groups_assoc',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('group_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['group_id'], ['groups.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('user_id', 'group_id')
    )
    op.create_table('actions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('target_id', sa.Integer(), nullable=True),
        sa.Column('action', sa.String(length=40), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['target_id'], ['submissions_calculated.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Many-to-many: coordinates_pairs <-> submissions_calculated (composite PK).
    op.create_table('submissions_calculated_pairs_assoc',
        sa.Column('pair_id', sa.Integer(), nullable=False),
        sa.Column('submission_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['pair_id'], ['coordinates_pairs.id'], ),
        sa.ForeignKeyConstraint(['submission_id'], ['submissions_calculated.id'], ),
        sa.PrimaryKeyConstraint('pair_id', 'submission_id')
    )
def upgrade():
    """Create the game schema: the ``game`` table, its per-board tables,
    and the ``log``/``record``/``score`` bookkeeping tables."""

    def _board_table(name, *extra_columns):
        # Every board table shares the same shape: a pickled board state,
        # a capacity, and a foreign key back to the owning game row.
        # ``extra_columns`` are inserted after ``game_id``.
        op.create_table(
            name,
            sa.Column('board', sa.PickleType(), nullable=True),
            sa.Column('maximum', sa.Integer(), nullable=True),
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('game_id', sa.Integer(), nullable=True),
            *extra_columns,
            sa.ForeignKeyConstraint(['game_id'], ['game.id'], ),
            sa.PrimaryKeyConstraint('id'))

    op.create_table(
        'game',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('start_datetime', sa.DateTime(), nullable=True),
        sa.Column('round_count', sa.Integer(), nullable=True),
        sa.Column('board_to_play', sa.Integer(), nullable=True),
        sa.Column('intake_cols', sa.Integer(), nullable=True),
        sa.Column('available_pickle', sa.PickleType(), nullable=True),
        sa.Column('board_list_pickle', sa.PickleType(), nullable=True),
        sa.PrimaryKeyConstraint('id'))

    _board_table('emergency')

    op.create_table(
        'log',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('game_id', sa.Integer(), nullable=True),
        sa.Column('round_count', sa.Integer(), nullable=False),
        sa.Column('board_played', sa.String(length=25), nullable=True),
        sa.Column('moves', sa.PickleType(), nullable=True),
        sa.ForeignKeyConstraint(['game_id'], ['game.id'], ),
        sa.PrimaryKeyConstraint('id'))

    _board_table('market')
    _board_table('outreach')
    _board_table('permanent')
    _board_table('rapid')

    op.create_table(
        'record',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('game_id', sa.Integer(), nullable=True),
        sa.Column('round_count', sa.Integer(), nullable=False),
        sa.Column('board_name', sa.String(length=25), nullable=False),
        sa.Column('beads_in', sa.Integer(), nullable=True),
        sa.Column('beads_out', sa.Integer(), nullable=True),
        sa.Column('end_count', sa.Integer(), nullable=False),
        sa.Column('note', sa.String(length=500), nullable=True),
        sa.ForeignKeyConstraint(['game_id'], ['game.id'], ),
        sa.PrimaryKeyConstraint('id'))

    op.create_table(
        'score',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('game_id', sa.Integer(), nullable=True),
        sa.Column('unsheltered', sa.Integer(), nullable=True),
        sa.Column('market', sa.Integer(), nullable=True),
        sa.Column('rapid', sa.Integer(), nullable=True),
        sa.Column('perm', sa.Integer(), nullable=True),
        sa.Column('emerg_total', sa.Integer(), nullable=True),
        sa.Column('trans_total', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['game_id'], ['game.id'], ),
        sa.PrimaryKeyConstraint('id'))

    # 'transitional' carries one extra flag; 'unsheltered' is standard.
    _board_table('transitional',
                 sa.Column('no_red', sa.Boolean(), nullable=True))
    _board_table('unsheltered')
class Listener(base_models.BASE, base_models.IdMixin, base_models.ProjectMixin,
               models.TimestampMixin, base_models.NameMixin,
               base_models.TagMixin):
    """Declarative model for a load-balancer listener (tagged/v2 variant).

    Maps the ``listener`` table, including protocol/status lookup foreign
    keys and relationships to the load balancer, default pool, SNI
    containers, L7 policies, tags and allowed CIDRs.
    """

    __data_model__ = data_models.Listener
    __tablename__ = "listener"
    # Response type used when serializing this model for the v2 API.
    __v2_wsme__ = listener.ListenerResponse
    # A load balancer may expose each protocol/port combination only once.
    __table_args__ = (sa.UniqueConstraint(
        'load_balancer_id', 'protocol', 'protocol_port',
        name='uq_listener_load_balancer_id_protocol_port'), )

    description = sa.Column(sa.String(255), nullable=True)
    # protocol / provisioning_status / operating_status reference small
    # lookup tables by name rather than using enums.
    protocol = sa.Column(sa.String(16),
                         sa.ForeignKey("protocol.name",
                                       name="fk_listener_protocol_name"),
                         nullable=False)
    protocol_port = sa.Column(sa.Integer(), nullable=False)
    connection_limit = sa.Column(sa.Integer, nullable=True)
    load_balancer_id = sa.Column(sa.String(36), sa.ForeignKey(
        "load_balancer.id", name="fk_listener_load_balancer_id"),
        nullable=True)
    tls_certificate_id = sa.Column(sa.String(255), nullable=True)
    default_pool_id = sa.Column(sa.String(36),
                                sa.ForeignKey("pool.id",
                                              name="fk_listener_pool_id"),
                                nullable=True)
    provisioning_status = sa.Column(
        sa.String(16),
        sa.ForeignKey("provisioning_status.name",
                      name="fk_listener_provisioning_status_name"),
        nullable=False)
    operating_status = sa.Column(sa.String(16), sa.ForeignKey(
        "operating_status.name", name="fk_listener_operating_status_name"),
        nullable=False)
    enabled = sa.Column(sa.Boolean(), nullable=False)

    load_balancer = orm.relationship("LoadBalancer", uselist=False,
                                     back_populates="listeners")
    default_pool = orm.relationship("Pool", uselist=False,
                                    back_populates="_default_listeners")
    # Deleting a listener cascades to its SNI containers.
    sni_containers = orm.relationship(
        'SNI', cascade='delete', uselist=True,
        backref=orm.backref('listener', uselist=False))
    # L7 policies keep a contiguous, 1-based 'position' ordering.
    l7policies = orm.relationship(
        'L7Policy', uselist=True, order_by='L7Policy.position',
        collection_class=orderinglist.ordering_list('position',
                                                    count_from=1),
        cascade='delete', back_populates='listener')

    peer_port = sa.Column(sa.Integer(), nullable=True)
    # NOTE(review): PickleType — stored pickled; presumably only written by
    # trusted code, never from untrusted input. Confirm with callers.
    insert_headers = sa.Column(sa.PickleType())
    timeout_client_data = sa.Column(sa.Integer, nullable=True)
    timeout_member_connect = sa.Column(sa.Integer, nullable=True)
    timeout_member_data = sa.Column(sa.Integer, nullable=True)
    timeout_tcp_inspect = sa.Column(sa.Integer, nullable=True)
    client_ca_tls_certificate_id = sa.Column(sa.String(255), nullable=True)
    client_authentication = sa.Column(
        sa.String(10),
        sa.ForeignKey("client_authentication_mode.name",
                      name="fk_listener_client_authentication_mode_name"),
        nullable=False, default=constants.CLIENT_AUTH_NONE)
    client_crl_container_id = sa.Column(sa.String(255), nullable=True)
    _tags = orm.relationship(
        'Tags',
        single_parent=True,
        lazy='subquery',
        cascade='all,delete-orphan',
        primaryjoin='and_(foreign(Tags.resource_id)==Listener.id)')

    # This property should be a unique list of the default_pool and anything
    # referenced by enabled L7Policies with at least one rule that also
    # reference this listener. The intent is that listener.pools should be a
    # unique list of pools this listener is *actually* using.
    @property
    def pools(self):
        """Return the unique pools this listener actually routes to."""
        _pools = []
        _p_ids = []
        if self.default_pool:
            _pools.append(self.default_pool)
            _p_ids.append(self.default_pool.id)
        # Only enabled policies with at least one rule can redirect traffic.
        l7_pools = [
            p.redirect_pool for p in self.l7policies
            if p.redirect_pool is not None and len(p.l7rules) > 0
            and p.enabled is True
        ]
        for p in l7_pools:
            if p.id not in _p_ids:
                _pools.append(p)
                _p_ids.append(p.id)
        return _pools

    allowed_cidrs = orm.relationship(
        'ListenerCidr',
        cascade='all,delete-orphan',
        uselist=True,
        backref=orm.backref('listener', uselist=False))
class List(db.Model):
    """A user-owned list of items.

    A list is either 'static' (explicit members via the ``items``
    relationship) or 'smart' (membership computed from the stored
    ``_query`` dict). The empty name ``''`` denotes the user's Library.
    """

    __tablename__ = 'list'

    id = sa.Column(sa.Integer(), primary_key=True)
    name = sa.Column(sa.String())
    index_ = sa.Column(sa.Integer())   # display position among the user's lists
    type = sa.Column(sa.String(64))    # 'smart' or 'static'
    # Stored under column name 'query'; MutableDict tracks in-place edits
    # to the nested dict so they are persisted on commit.
    _query = sa.Column(
        'query', MutableDict.as_mutable(sa.PickleType(pickler=json_pickler)))

    user_id = sa.Column(sa.String(43), sa.ForeignKey('user.id'))
    user = sa.orm.relationship('User',
                               backref=sa.orm.backref('lists',
                                                      lazy='dynamic'))
    items = sa.orm.relationship('Item', secondary=list_items,
                                backref=sa.orm.backref('lists',
                                                       lazy='dynamic'))

    @classmethod
    def get(cls, user_id, name=None):
        """Return the list for (user_id, name), or None.

        ``user_id`` may be a combined "nickname:name" id when ``name`` is
        omitted.
        """
        if name is None:
            user_id, name = cls.get_user_name(user_id)
        return cls.query.filter_by(user_id=user_id, name=name).first()

    @classmethod
    def get_user_name(cls, user_id):
        """Split a combined "nickname:name" id into (user_id, name)."""
        nickname, name = user_id.split(':', 1)
        if nickname:
            user = User.query.filter_by(nickname=nickname).first()
            user_id = user.id
        else:
            # An empty nickname refers to the local user.
            user_id = settings.USER_ID
        return user_id, name

    @classmethod
    def get_or_create(cls, user_id, name=None, query=None):
        """Fetch a list by id/name, creating it if it does not exist."""
        if name is None:
            user_id, name = cls.get_user_name(user_id)
        lst = cls.get(user_id, name)
        if not lst:
            lst = cls.create(user_id, name, query)
        return lst

    @classmethod
    def create(cls, user_id, name, query=None):
        """Create a new list, de-duplicating the name with " [n]" suffixes."""
        prefix = name
        n = 2
        while cls.get(user_id, name):
            name = '%s [%s]' % (prefix, n)
            n += 1
        lst = cls(user_id=user_id, name=name)
        lst._query = query
        lst.type = 'smart' if lst._query else 'static'
        # Append at the end of the user's existing lists.
        lst.index_ = cls.query.filter_by(user_id=user_id).count()
        state.db.session.add(lst)
        state.db.session.commit()
        if user_id == settings.USER_ID:
            # Only static, named local lists are recorded in the changelog.
            if not lst._query and name != '':
                Changelog.record(state.user(), 'addlist', lst.name)
        return lst

    @classmethod
    def rename_user(cls, old, new):
        """Rewrite smart-list conditions referencing user ``old`` to ``new``."""
        for lst in cls.query.filter(cls._query != None):

            def update_conditions(conditions):
                changed = False
                for c in conditions:
                    if 'conditions' in c:
                        # Fixed: recurse into the nested group (the original
                        # recursed on the same list, looping forever) and
                        # accumulate instead of overwriting ``changed``.
                        changed = update_conditions(c['conditions']) or changed
                    else:
                        if c.get('key') == 'list' and c.get(
                                'value', '').startswith('%s:' % old):
                            # Fixed operator precedence: the original built a
                            # tuple ('%s:%s' % new, tail) instead of one
                            # formatted string.
                            c['value'] = '%s:%s' % (
                                new, c['value'].split(':', 1)[1])
                            changed = True
                return changed

            if update_conditions(lst._query.get('conditions', [])):
                lst.save()

    def add_items(self, items):
        """Append the given item ids to a static list and persist."""
        from item.models import Item
        for item_id in items:
            i = Item.get(item_id)
            if i:
                self.items.append(i)
                if self.user_id == settings.USER_ID:
                    i.queue_download()
                i.update()
        state.db.session.add(self)
        state.db.session.commit()
        if self.user_id == settings.USER_ID and self.name != '':
            Changelog.record(self.user, 'addlistitems', self.name, items)
        self.user.clear_smart_list_cache()
        self.user.clear_list_cache()

    def get_items(self):
        """Return the list's items: query result for smart lists, a join
        over the membership table for static ones."""
        from item.models import Item, user_items
        if self.type == 'smart':
            return Parser(Item, user_items).find({'query': self._query})
        else:
            return self.user.items.join(
                Item.lists, aliased=True).filter(List.id == self.id)

    def remove_items(self, items):
        """Remove the given item ids from a static list and persist."""
        from item.models import Item
        for item_id in items:
            i = Item.get(item_id)
            if i in self.items:
                self.items.remove(i)
                i.update()
        state.db.session.add(self)
        state.db.session.commit()
        if self.user_id == settings.USER_ID and self.name != '':
            Changelog.record(self.user, 'removelistitems', self.name, items)
        self.user.clear_smart_list_cache()
        self.user.clear_list_cache()

    def remove(self):
        """Delete the list (static members are detached first)."""
        if not self._query:
            # Fixed: iterate over a copy — removing from self.items while
            # iterating it skips every other element.
            for i in list(self.items):
                self.items.remove(i)
            if self.user_id == settings.USER_ID and self.name != '':
                Changelog.record(self.user, 'removelist', self.name)
        state.db.session.delete(self)
        state.db.session.commit()

    @property
    def public_id(self):
        """"nickname:name" id (empty nickname for the local user)."""
        id = ''
        if self.user_id != settings.USER_ID:
            id += self.user.nickname
        id = '%s:%s' % (id, self.name)
        return id

    @property
    def find_id(self):
        """"user_id:row_id" id (empty user part for the local user)."""
        id = ''
        if self.user_id != settings.USER_ID:
            id += self.user_id
        id = '%s:%s' % (id, self.id)
        return id

    def __repr__(self):
        return self.public_id

    def items_count(self):
        """Return the number of items currently in the list.

        Note: the original body contained an unreachable caching branch
        (settings.list_cache) after this return; that dead code has been
        removed without changing behavior.
        """
        return self.get_items().count()

    def json(self):
        """Serialize the list for the API; the unnamed list is presented
        as the 'library'."""
        r = {
            'id': self.public_id,
            'user': self.user.name,
            'name': self.name,
            'index': self.index_,
            # too slow for many smart lists
            'items': self.items_count(),
            'type': self.type
        }
        if self.name == '':
            r['name'] = 'Library'
            r['type'] = 'library'
            del r['index']
        if self.type == 'smart':
            r['query'] = self._query
        return r

    def save(self):
        """Persist this row."""
        state.db.session.add(self)
        state.db.session.commit()
class Listener(base_models.BASE, base_models.IdMixin, base_models.ProjectMixin,
               models.TimestampMixin, base_models.NameMixin):
    """Declarative model for a load-balancer listener (backref variant).

    Maps the ``listener`` table; relationships to the load balancer and
    default pool are declared here via ``backref``.
    """

    __data_model__ = data_models.Listener
    __tablename__ = "listener"
    # A load balancer may expose each port only once.
    __table_args__ = (sa.UniqueConstraint(
        'load_balancer_id', 'protocol_port',
        name='uq_listener_load_balancer_id_protocol_port'), )

    description = sa.Column(sa.String(255), nullable=True)
    # protocol / provisioning_status / operating_status reference small
    # lookup tables by name rather than using enums.
    protocol = sa.Column(sa.String(16),
                         sa.ForeignKey("protocol.name",
                                       name="fk_listener_protocol_name"),
                         nullable=False)
    protocol_port = sa.Column(sa.Integer(), nullable=False)
    connection_limit = sa.Column(sa.Integer, nullable=True)
    load_balancer_id = sa.Column(sa.String(36), sa.ForeignKey(
        "load_balancer.id", name="fk_listener_load_balancer_id"),
        nullable=True)
    tls_certificate_id = sa.Column(sa.String(36), nullable=True)
    default_pool_id = sa.Column(sa.String(36),
                                sa.ForeignKey("pool.id",
                                              name="fk_listener_pool_id"),
                                nullable=True)
    provisioning_status = sa.Column(
        sa.String(16),
        sa.ForeignKey("provisioning_status.name",
                      name="fk_listener_provisioning_status_name"),
        nullable=False)
    operating_status = sa.Column(sa.String(16), sa.ForeignKey(
        "operating_status.name", name="fk_listener_operating_status_name"),
        nullable=False)
    enabled = sa.Column(sa.Boolean(), nullable=False)

    load_balancer = orm.relationship("LoadBalancer", uselist=False,
                                     backref=orm.backref("listeners",
                                                         uselist=True,
                                                         cascade="delete"))
    default_pool = orm.relationship("Pool", uselist=False,
                                    backref=orm.backref("_default_listeners",
                                                        uselist=True))
    peer_port = sa.Column(sa.Integer(), nullable=True)
    # NOTE(review): PickleType — stored pickled; presumably only written by
    # trusted code, never from untrusted input. Confirm with callers.
    insert_headers = sa.Column(sa.PickleType())

    # This property should be a unique list of the default_pool and anything
    # referenced by enabled L7Policies with at least one rule that also
    # reference this listener. The intent is that listener.pools should be a
    # unique list of pools this listener is *actually* using.
    @property
    def pools(self):
        """Return the unique pools this listener actually routes to.

        NOTE(review): ``self.l7policies`` is not declared in this class —
        presumably provided by a backref on the L7Policy model; confirm.
        """
        _pools = []
        _p_ids = []
        if self.default_pool:
            _pools.append(self.default_pool)
            _p_ids.append(self.default_pool.id)
        # Only enabled policies with at least one rule can redirect traffic.
        l7_pools = [
            p.redirect_pool for p in self.l7policies
            if p.redirect_pool is not None and len(p.l7rules) > 0
            and p.enabled is True
        ]
        for p in l7_pools:
            if p.id not in _p_ids:
                _pools.append(p)
                _p_ids.append(p.id)
        return _pools
def upgrade():
    """Add a nullable pickled ``genres`` column to artist and venue."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Both tables gain an identical column; a fresh Column object is built
    # per table because SQLAlchemy columns cannot be shared between tables.
    for table_name in ('artist', 'venue'):
        op.add_column(
            table_name,
            sa.Column('genres', sa.PickleType(), nullable=True))
def upgrade():
    """Create the initial forum schema.

    Forward references between users/topics/posts/forums are declared with
    ``use_alter=True`` so those foreign keys are added after all tables
    exist; creation order otherwise follows plain dependencies.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # Permission groups: one boolean flag per capability.
    op.create_table('groups',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(length=255), nullable=False),
                    sa.Column('description', sa.Text(), nullable=True),
                    sa.Column('admin', sa.Boolean(), nullable=False),
                    sa.Column('super_mod', sa.Boolean(), nullable=False),
                    sa.Column('mod', sa.Boolean(), nullable=False),
                    sa.Column('guest', sa.Boolean(), nullable=False),
                    sa.Column('banned', sa.Boolean(), nullable=False),
                    sa.Column('mod_edituser', sa.Boolean(), nullable=False),
                    sa.Column('mod_banuser', sa.Boolean(), nullable=False),
                    sa.Column('editpost', sa.Boolean(), nullable=False),
                    sa.Column('deletepost', sa.Boolean(), nullable=False),
                    sa.Column('deletetopic', sa.Boolean(), nullable=False),
                    sa.Column('posttopic', sa.Boolean(), nullable=False),
                    sa.Column('postreply', sa.Boolean(), nullable=False),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('name'))
    op.create_table('categories',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('title', sa.String(length=255),
                              nullable=False),
                    sa.Column('description', sa.Text(), nullable=True),
                    sa.Column('position', sa.Integer(), nullable=False),
                    sa.PrimaryKeyConstraint('id'))
    op.create_table('settingsgroup',
                    sa.Column('key', sa.String(length=255), nullable=False),
                    sa.Column('name', sa.String(length=255), nullable=False),
                    sa.Column('description', sa.Text(), nullable=False),
                    sa.PrimaryKeyConstraint('key'))
    # Settings values are pickled; 'value_type' describes how to render them.
    op.create_table(
        'settings',
        sa.Column('key', sa.String(length=255), nullable=False),
        sa.Column('value', sa.PickleType(), nullable=False),
        sa.Column('settingsgroup', sa.String(length=255), nullable=False),
        sa.Column('name', sa.String(length=200), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('value_type', sa.String(length=20), nullable=False),
        sa.Column('extra', sa.PickleType(), nullable=True),
        sa.ForeignKeyConstraint(['settingsgroup'], ['settingsgroup.key'],
                                name='fk_settingsgroup',
                                use_alter=True),
        sa.PrimaryKeyConstraint('key'))
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=200), nullable=False),
        sa.Column('email', sa.String(length=200), nullable=False),
        sa.Column('password', sa.String(length=120), nullable=False),
        sa.Column('date_joined', sa.DateTime(), nullable=True),
        sa.Column('lastseen', sa.DateTime(), nullable=True),
        sa.Column('birthday', sa.DateTime(), nullable=True),
        sa.Column('gender', sa.String(length=10), nullable=True),
        sa.Column('website', sa.String(length=200), nullable=True),
        sa.Column('location', sa.String(length=100), nullable=True),
        sa.Column('signature', sa.Text(), nullable=True),
        sa.Column('avatar', sa.String(length=200), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('theme', sa.String(length=15), nullable=True),
        sa.Column('language', sa.String(length=15), nullable=True),
        sa.Column('post_count', sa.Integer(), nullable=True),
        sa.Column('primary_group_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['primary_group_id'],
            ['groups.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('username'))
    # Association table: topics a user follows.
    op.create_table(
        'topictracker',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('topic_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['topic_id'], ['topics.id'],
                                name='fk_tracker_topic_id',
                                use_alter=True),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ))
    # Association table: group membership.
    op.create_table('groups_users',
                    sa.Column('user_id', sa.Integer(), nullable=True),
                    sa.Column('group_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(
                        ['group_id'],
                        ['groups.id'],
                    ),
                    sa.ForeignKeyConstraint(
                        ['user_id'],
                        ['users.id'],
                    ))
    # Per-user read markers for forums.
    op.create_table(
        'forumsread',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('forum_id', sa.Integer(), nullable=False),
        sa.Column('last_read', sa.DateTime(), nullable=True),
        sa.Column('cleared', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['forum_id'], ['forums.id'],
                                name='fk_fr_forum_id',
                                use_alter=True),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ),
        sa.PrimaryKeyConstraint('user_id', 'forum_id'))
    op.create_table(
        'moderators',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('forum_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['forum_id'], ['forums.id'],
                                name='fk_forum_id',
                                use_alter=True),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ))
    op.create_table(
        'posts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('topic_id', sa.Integer(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('username', sa.String(length=200), nullable=False),
        sa.Column('content', sa.Text(), nullable=False),
        sa.Column('date_created', sa.DateTime(), nullable=True),
        sa.Column('date_modified', sa.DateTime(), nullable=True),
        sa.Column('modified_by', sa.String(length=200), nullable=True),
        sa.ForeignKeyConstraint(['topic_id'], ['topics.id'],
                                name='fk_post_topic_id',
                                ondelete='CASCADE',
                                use_alter=True),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'privatemessages',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('from_user_id', sa.Integer(), nullable=True),
        sa.Column('to_user_id', sa.Integer(), nullable=True),
        sa.Column('subject', sa.String(length=255), nullable=True),
        sa.Column('message', sa.Text(), nullable=True),
        sa.Column('date_created', sa.DateTime(), nullable=True),
        sa.Column('trash', sa.Boolean(), nullable=False),
        sa.Column('draft', sa.Boolean(), nullable=False),
        sa.Column('unread', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(
            ['from_user_id'],
            ['users.id'],
        ),
        sa.ForeignKeyConstraint(
            ['to_user_id'],
            ['users.id'],
        ),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ),
        sa.PrimaryKeyConstraint('id'))
    # Per-user read markers for topics.
    op.create_table(
        'topicsread',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('topic_id', sa.Integer(), nullable=False),
        sa.Column('forum_id', sa.Integer(), nullable=False),
        sa.Column('last_read', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['forum_id'], ['forums.id'],
                                name='fk_tr_forum_id',
                                use_alter=True),
        sa.ForeignKeyConstraint(['topic_id'], ['topics.id'],
                                name='fk_tr_topic_id',
                                use_alter=True),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ),
        sa.PrimaryKeyConstraint('user_id', 'topic_id', 'forum_id'))
    op.create_table(
        'topics',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('forum_id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('username', sa.String(length=200), nullable=False),
        sa.Column('date_created', sa.DateTime(), nullable=True),
        sa.Column('last_updated', sa.DateTime(), nullable=True),
        sa.Column('locked', sa.Boolean(), nullable=True),
        sa.Column('important', sa.Boolean(), nullable=True),
        sa.Column('views', sa.Integer(), nullable=True),
        sa.Column('post_count', sa.Integer(), nullable=True),
        sa.Column('first_post_id', sa.Integer(), nullable=True),
        sa.Column('last_post_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['first_post_id'], ['posts.id'],
                                ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['forum_id'], ['forums.id'],
                                name='fk_topic_forum_id',
                                use_alter=True),
        sa.ForeignKeyConstraint(
            ['last_post_id'],
            ['posts.id'],
        ),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'reports',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('reporter_id', sa.Integer(), nullable=False),
        sa.Column('reported', sa.DateTime(), nullable=True),
        sa.Column('post_id', sa.Integer(), nullable=False),
        sa.Column('zapped', sa.DateTime(), nullable=True),
        sa.Column('zapped_by', sa.Integer(), nullable=True),
        sa.Column('reason', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(
            ['post_id'],
            ['posts.id'],
        ),
        sa.ForeignKeyConstraint(
            ['reporter_id'],
            ['users.id'],
        ),
        sa.ForeignKeyConstraint(
            ['zapped_by'],
            ['users.id'],
        ),
        sa.PrimaryKeyConstraint('id'))
    # Forums carry denormalized last-post info for cheap listing pages.
    op.create_table(
        'forums',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('category_id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('position', sa.Integer(), nullable=False),
        sa.Column('locked', sa.Boolean(), nullable=False),
        sa.Column('show_moderators', sa.Boolean(), nullable=False),
        sa.Column('external', sa.String(length=200), nullable=True),
        sa.Column('post_count', sa.Integer(), nullable=False),
        sa.Column('topic_count', sa.Integer(), nullable=False),
        sa.Column('last_post_id', sa.Integer(), nullable=True),
        sa.Column('last_post_title', sa.String(length=255), nullable=True),
        sa.Column('last_post_user_id', sa.Integer(), nullable=True),
        sa.Column('last_post_username', sa.String(length=255),
                  nullable=True),
        sa.Column('last_post_created', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(
            ['category_id'],
            ['categories.id'],
        ),
        sa.ForeignKeyConstraint(
            ['last_post_id'],
            ['posts.id'],
        ),
        sa.ForeignKeyConstraint(
            ['last_post_user_id'],
            ['users.id'],
        ),
        sa.PrimaryKeyConstraint('id'))
def upgrade():
    """Add execution tokens, seed the new config table with defaults, and
    create the certificates/managers/rabbitmq_brokers tables plus the
    deployment-update/blueprint columns and sites/plugins-update tables."""
    op.add_column('executions',
                  sa.Column('token', sa.String(length=100), nullable=True))
    bind = op.get_bind()
    session = orm.Session(bind=bind)
    # Create the config table, then seed it with the defaults below.
    Config.__table__.create(bind)
    session.add_all([
        # --- REST service scope ---
        Config(name='rest_service_log_path',
               value='/var/log/cloudify/rest/cloudify-rest-service.log',
               scope='rest',
               schema=None,
               is_editable=False),
        Config(name='rest_service_log_level',
               value='INFO',
               scope='rest',
               schema={
                   'type': 'string',
                   'enum': LOG_LEVELS_ENUM
               },
               is_editable=True),
        # LDAP integration (all unset by default).
        Config(name='ldap_server',
               value=None,
               scope='rest',
               schema={'type': 'string'},
               is_editable=True),
        Config(name='ldap_username',
               value=None,
               scope='rest',
               schema={'type': 'string'},
               is_editable=True),
        Config(name='ldap_password',
               value=None,
               scope='rest',
               schema={'type': 'string'},
               is_editable=True),
        Config(name='ldap_domain',
               value=None,
               scope='rest',
               schema={'type': 'string'},
               is_editable=True),
        Config(name='ldap_is_active_directory',
               value=None,
               scope='rest',
               schema={'type': 'boolean'},
               is_editable=True),
        Config(name='ldap_dn_extra',
               value=None,
               scope='rest',
               schema=None,
               is_editable=True),
        Config(name='ldap_timeout',
               value=5.0,
               scope='rest',
               schema={'type': 'number'},
               is_editable=True),
        Config(name='ldap_nested_levels',
               value=1,
               scope='rest',
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        # File server / maintenance paths (fixed at install time).
        Config(name='file_server_root',
               value='/opt/manager/resources',
               scope='rest',
               schema=None,
               is_editable=False),
        Config(name='file_server_url',
               value='http://127.0.0.1:53333/resources',
               scope='rest',
               schema=None,
               is_editable=False),
        Config(name='insecure_endpoints_disabled',
               value=True,
               scope='rest',
               schema={'type': 'boolean'},
               is_editable=False),
        Config(name='maintenance_folder',
               value='/opt/manager/maintenance',
               scope='rest',
               schema=None,
               is_editable=False),
        Config(name='min_available_memory_mb',
               value=100,
               scope='rest',
               schema={
                   'type': 'number',
                   'minimum': 0
               },
               is_editable=True),
        # Account lockout policy (-1 lock period means "lock forever").
        Config(name='failed_logins_before_account_lock',
               value=4,
               scope='rest',
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        Config(name='account_lock_period',
               value=-1,
               scope='rest',
               schema={
                   'type': 'number',
                   'minimum': -1
               },
               is_editable=True),
        Config(name='public_ip',
               value=None,
               scope='rest',
               schema=None,
               is_editable=False),
        Config(name='default_page_size',
               value=1000,
               scope='rest',
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        # --- management worker scope ---
        Config(name='max_workers',
               value=5,
               scope='mgmtworker',
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        Config(name='min_workers',
               value=2,
               scope='mgmtworker',
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        # --- agent scope ---
        Config(name='broker_port',
               value=5671,
               scope='agent',
               schema={
                   'type': 'number',
                   'minimum': 1,
                   'maximum': 65535
               },
               is_editable=True),
        Config(name='min_workers',
               value=2,
               scope='agent',
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        Config(name='max_workers',
               value=5,
               scope='agent',
               schema={
                   'type': 'number',
                   'minimum': 1
               },
               is_editable=True),
        Config(name='heartbeat',
               value=30,
               scope='agent',
               schema={
                   'type': 'number',
                   'minimum': 0
               },
               is_editable=True),
        Config(name='log_level',
               value='info',
               scope='agent',
               schema={
                   'type': 'string',
                   'enum': LOG_LEVELS_ENUM
               }),
        # --- workflow scope (-1 retries means "retry forever") ---
        Config(
            name='task_retries',
            value=60,
            scope='workflow',
            schema={
                'type': 'number',
                'minimum': -1
            },
        ),
        Config(
            name='task_retry_interval',
            value=15,
            scope='workflow',
            schema={
                'type': 'number',
                'minimum': 0
            },
        ),
        Config(
            name='subgraph_retries',
            value=0,
            scope='workflow',
            schema={
                'type': 'number',
                'minimum': -1
            },
        )
    ])
    session.commit()
    op.create_table(
        'certificates',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.Text(), unique=True, nullable=False),
        sa.Column('value', sa.Text(), unique=False, nullable=False),
        sa.Column('updated_at', UTCDateTime(), nullable=True),
        sa.Column('_updater_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['_updater_id'], [u'users.id'],
                                ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id', name=op.f('certificates_pkey')))
    op.create_table(
        'managers',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('hostname', sa.Text(), unique=True, nullable=False),
        sa.Column('private_ip', sa.Text(), unique=True, nullable=False),
        sa.Column('public_ip', sa.Text(), unique=True, nullable=False),
        sa.Column('version', sa.Text(), nullable=False),
        sa.Column('edition', sa.Text(), nullable=False),
        sa.Column('distribution', sa.Text(), nullable=False),
        sa.Column('distro_release', sa.Text(), nullable=False),
        sa.Column('fs_sync_node_id', sa.Text(), nullable=True),
        sa.Column('networks', JSONString(), nullable=True),
        sa.Column('_ca_cert_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['_ca_cert_id'], [u'certificates.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('managers_pkey')))
    op.create_table(
        'rabbitmq_brokers',
        sa.Column('name', sa.Text(), nullable=False),
        sa.Column('host', sa.Text(), nullable=False),
        sa.Column('management_host', sa.Text(), nullable=True),
        sa.Column('port', sa.Integer()),
        sa.Column('username', sa.Text(), nullable=True),
        sa.Column('password', sa.Text(), nullable=True),
        sa.Column('params', JSONString(), nullable=True),
        sa.Column('networks', JSONString(), nullable=True),
        sa.Column('_ca_cert_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['_ca_cert_id'], [u'certificates.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('name', name=op.f('rabbitmq_brokers_pkey')))
    # Pickled plugin lists for deployment updates.
    op.add_column('deployment_updates',
                  sa.Column('central_plugins_to_install', sa.PickleType()))
    op.add_column('deployment_updates',
                  sa.Column('central_plugins_to_uninstall', sa.PickleType()))
    op.add_column(
        'blueprints',
        sa.Column('is_hidden',
                  sa.Boolean(),
                  nullable=False,
                  server_default='f'))
    _create_sites_table()
    _create_plugins_update_table()
def upgrade():
    """Create the Celery result-backend tables, the users/hosts/roles and
    host-command tables, plus a Postgres trigger that mirrors the latest
    command event onto its host_commands row."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Standard Celery database result-backend tables.
    op.create_table('celery_taskmeta',
                    sa.Column('id', sa.Integer(), autoincrement=True,
                              nullable=False),
                    sa.Column('task_id', sa.String(length=155),
                              nullable=True),
                    sa.Column('status', sa.String(length=50), nullable=True),
                    sa.Column('result', sa.PickleType(), nullable=True),
                    sa.Column('date_done', sa.DateTime(), nullable=True),
                    sa.Column('traceback', sa.Text(), nullable=True),
                    sa.Column('name', sa.String(length=155), nullable=True),
                    sa.Column('args', sa.LargeBinary(), nullable=True),
                    sa.Column('kwargs', sa.LargeBinary(), nullable=True),
                    sa.Column('worker', sa.String(length=155),
                              nullable=True),
                    sa.Column('retries', sa.Integer(), nullable=True),
                    sa.Column('queue', sa.String(length=155), nullable=True),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('task_id'),
                    sqlite_autoincrement=True)
    op.create_table('celery_tasksetmeta',
                    sa.Column('id', sa.Integer(), autoincrement=True,
                              nullable=False),
                    sa.Column('taskset_id', sa.String(length=155),
                              nullable=True),
                    sa.Column('result', sa.PickleType(), nullable=True),
                    sa.Column('date_done', sa.DateTime(), nullable=True),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('taskset_id'),
                    sqlite_autoincrement=True)
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=60), nullable=False),
        sa.Column('email', sa.String(length=120), nullable=False),
        sa.Column('first_name', sa.String(length=32), nullable=True),
        sa.Column('last_name', sa.String(length=32), nullable=True),
        sa.Column('created_date', sa.DateTime(), nullable=True),
        sa.Column('password_hash', sa.String(length=128), nullable=True),
        sa.Column('active', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'))
    op.create_index(op.f('ix_users_username'), 'users', ['username'],
                    unique=True)
    # SSH targets a user can run commands on; auth is either a stored
    # password or a private key.
    op.create_table(
        'hosts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=128), nullable=False),
        sa.Column('hostname', sa.String(length=253), nullable=False),
        sa.Column('port', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=32), nullable=True),
        sa.Column('ssh_options', sa.Text(), nullable=True),
        sa.Column('authentication_method',
                  sa.Enum('password', 'key_pair',
                          name='authenticationmethod'),
                  nullable=True),
        sa.Column('password', sa.String(length=128), nullable=True),
        sa.Column('private_key', sa.Text(), nullable=True),
        sa.Column('encrypt_authentication', sa.Boolean(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['user_id'],
            ['users.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('user_id', 'name',
                            name='hosts_user_id_name_unique'))
    op.create_index(op.f('ix_hosts_hostname'), 'hosts', ['hostname'],
                    unique=False)
    op.create_index(op.f('ix_hosts_name'), 'hosts', ['name'], unique=False)
    op.create_table('roles',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(), nullable=True),
                    sa.Column('user_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(
                        ['user_id'],
                        ['users.id'],
                    ),
                    sa.PrimaryKeyConstraint('id'))
    # Commands registered per host; latest_result/last_completed_at are
    # denormalized from host_commands_events by the trigger created below.
    op.create_table(
        'host_commands',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('command', sa.Text(), nullable=False),
        sa.Column('status', sa.Boolean(), nullable=False),
        sa.Column('latest_result', sa.Text(), nullable=True),
        sa.Column('last_completed_at', sa.DateTime(), nullable=True),
        sa.Column('host_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['host_id'],
            ['hosts.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('host_id', 'command',
                            name='host_id_command_unique'))
    op.create_index(op.f('ix_host_commands_command'), 'host_commands',
                    ['command'], unique=False)
    op.create_table(
        'host_commands_events',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('result', sa.Text(), nullable=False),
        sa.Column('exit_code', sa.Integer(), nullable=False),
        sa.Column('completed_at', sa.DateTime(), nullable=False),
        sa.Column('host_command_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['host_command_id'],
            ['host_commands.id'],
        ),
        sa.PrimaryKeyConstraint('id'))
    # ### end Alembic commands ###
    # PostgreSQL-only: keep host_commands in sync with each inserted event.
    op.execute("""
    CREATE FUNCTION update_command_event() RETURNS trigger AS
    $update_command_event$
        BEGIN
            update host_commands
            set last_completed_at = new.completed_at,
                latest_result = new.result
            where id = new.host_command_id;
            RETURN NEW;
        END;
    $update_command_event$ LANGUAGE plpgsql;

    CREATE TRIGGER update_host_command_with_event_detail
    AFTER INSERT ON host_commands_events
    for each row execute procedure update_command_event();
    """)
    op.execute("""
    CREATE SEQUENCE IF NOT EXISTS task_id_sequence;
    """)
def upgrade():
    """Create the initial advertising-management schema.

    Parent tables are created before any table whose foreign keys
    reference them, so the statement order below must be preserved.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- lookup tables with no outbound foreign keys ---
    op.create_table(
        'ad_size',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('width', sa.Integer(), nullable=True),
        sa.Column('height', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'agent',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'team',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.Column('type', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'client',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.Column('industry', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    # --- tables referencing the lookups above ---
    op.create_table(
        'medium',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.Column('owner_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['owner_id'], ['team.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.Column('email', sa.String(length=120), nullable=True),
        sa.Column('pwdhash', sa.String(length=100), nullable=True),
        sa.Column('phone', sa.String(length=120), nullable=True),
        sa.Column('status', sa.Integer(), nullable=True),
        sa.Column('team_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('phone'))
    op.create_table(
        'bra_comment',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('target_type', sa.String(length=50), nullable=True),
        sa.Column('target_id', sa.Integer(), nullable=True),
        sa.Column('msg', sa.String(length=1000), nullable=True),
        sa.Column('creator_id', sa.Integer(), nullable=True),
        sa.Column('create_time', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['creator_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'ad_position',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.Column('description', sa.String(length=500), nullable=True),
        sa.Column('size_id', sa.Integer(), nullable=True),
        sa.Column('status', sa.Integer(), nullable=True),
        sa.Column('level', sa.Integer(), nullable=True),
        sa.Column('medium_id', sa.Integer(), nullable=True),
        sa.Column('ad_type', sa.Integer(), nullable=True),
        sa.Column('cpd_num', sa.Integer(), nullable=True),
        sa.Column('max_order_num', sa.Integer(), nullable=True),
        sa.Column('price', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['medium_id'], ['medium.id'], ),
        sa.ForeignKeyConstraint(['size_id'], ['ad_size.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'ad_unit',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.Column('description', sa.String(length=500), nullable=True),
        sa.Column('size_id', sa.Integer(), nullable=True),
        sa.Column('margin', sa.String(length=50), nullable=True),
        sa.Column('target', sa.Integer(), nullable=True),
        sa.Column('status', sa.Integer(), nullable=True),
        sa.Column('medium_id', sa.Integer(), nullable=True),
        sa.Column('estimate_num', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['medium_id'], ['medium.id'], ),
        sa.ForeignKeyConstraint(['size_id'], ['ad_size.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'bra_order',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.Integer(), nullable=True),
        sa.Column('campaign', sa.String(length=100), nullable=True),
        sa.Column('medium_id', sa.Integer(), nullable=True),
        sa.Column('order_type', sa.Integer(), nullable=True),
        sa.Column('contract', sa.String(length=100), nullable=True),
        sa.Column('money', sa.Integer(), nullable=True),
        sa.Column('agent_id', sa.Integer(), nullable=True),
        sa.Column('creator_id', sa.Integer(), nullable=True),
        sa.Column('create_time', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['agent_id'], ['agent.id'], ),
        sa.ForeignKeyConstraint(['client_id'], ['client.id'], ),
        sa.ForeignKeyConstraint(['creator_id'], ['user.id'], ),
        sa.ForeignKeyConstraint(['medium_id'], ['medium.id'], ),
        sa.PrimaryKeyConstraint('id'))
    # --- order <-> user association tables (no primary keys) ---
    op.create_table(
        'order_agent_sales',
        sa.Column('agent_sale_id', sa.Integer(), nullable=True),
        sa.Column('order_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['agent_sale_id'], ['user.id'], ),
        sa.ForeignKeyConstraint(['order_id'], ['bra_order.id'], ))
    op.create_table(
        'order_users_operater',
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('order_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['order_id'], ['bra_order.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ))
    op.create_table(
        'bra_item',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('order_id', sa.Integer(), nullable=True),
        sa.Column('description', sa.String(length=500), nullable=True),
        sa.Column('sale_type', sa.Integer(), nullable=True),
        sa.Column('position_id', sa.Integer(), nullable=True),
        sa.Column('special_sale', sa.Boolean(), nullable=True),
        sa.Column('price', sa.Integer(), nullable=True),
        sa.Column('ad_type', sa.Integer(), nullable=True),
        sa.Column('priority', sa.Integer(), nullable=True),
        sa.Column('speed', sa.Integer(), nullable=True),
        sa.Column('status', sa.Integer(), nullable=True),
        sa.Column('item_status', sa.Integer(), nullable=True),
        sa.Column('creator_id', sa.Integer(), nullable=True),
        sa.Column('create_time', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['creator_id'], ['user.id'], ),
        sa.ForeignKeyConstraint(['order_id'], ['bra_order.id'], ),
        sa.ForeignKeyConstraint(['position_id'], ['ad_position.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'order_users_designerer',
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('order_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['order_id'], ['bra_order.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ))
    op.create_table(
        'order_users_planer',
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('order_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['order_id'], ['bra_order.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ))
    op.create_table(
        'ad_position_unit',
        sa.Column('position_id', sa.Integer(), nullable=True),
        sa.Column('unit_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['position_id'], ['ad_position.id'], ),
        sa.ForeignKeyConstraint(['unit_id'], ['ad_unit.id'], ))
    op.create_table(
        'order_direct_sales',
        sa.Column('sale_id', sa.Integer(), nullable=True),
        sa.Column('order_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['order_id'], ['bra_order.id'], ),
        sa.ForeignKeyConstraint(['sale_id'], ['user.id'], ))
    op.create_table(
        'bra_schedule',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('item_id', sa.Integer(), nullable=True),
        sa.Column('num', sa.Integer(), nullable=True),
        sa.Column('date', sa.Date(), nullable=True),
        sa.Column('start', sa.Time(), nullable=True),
        sa.Column('end', sa.Time(), nullable=True),
        sa.ForeignKeyConstraint(['item_id'], ['bra_item.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'materials',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=100), nullable=True),
        sa.Column('type', sa.Integer(), nullable=True),
        sa.Column('item_id', sa.Integer(), nullable=True),
        sa.Column('code', sa.Text(), nullable=True),
        sa.Column('props', sa.PickleType(), nullable=True),
        sa.Column('status', sa.Integer(), nullable=True),
        sa.Column('creator_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['creator_id'], ['user.id'], ),
        sa.ForeignKeyConstraint(['item_id'], ['bra_item.id'], ),
        sa.PrimaryKeyConstraint('id'))
def downgrade():
    """Revert ``xcom.value`` to a dill-backed ``PickleType``.

    ``batch_alter_table`` is used as the SQLite workaround: SQLite
    cannot ALTER COLUMN in place, so Alembic rebuilds the table.
    """
    with op.batch_alter_table("xcom") as batch_op:
        batch_op.alter_column('value', type_=sa.PickleType(pickler=dill))
def upgrade():
    """Add a nullable pickled ``filters`` column to the ``feed`` table."""
    op.add_column("feed", sa.Column("filters", sa.PickleType(), nullable=True))
class Host(BASE):
    """A mirror host belonging to a :class:`Site`."""

    __tablename__ = 'host'

    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.Text(), nullable=False)
    site_id = sa.Column(
        sa.Integer, sa.ForeignKey('site.id'), nullable=True)
    robot_email = sa.Column(sa.Text(), nullable=True)
    admin_active = sa.Column(sa.Boolean(), default=True, nullable=False)
    user_active = sa.Column(sa.Boolean(), default=True, nullable=False)
    country = sa.Column(sa.Text(), nullable=False)
    bandwidth_int = sa.Column(sa.Integer, default=100, nullable=True)
    comment = sa.Column(sa.Text(), nullable=True)
    config = deferred(sa.Column(sa.PickleType(), nullable=True))
    last_checked_in = sa.Column(sa.DateTime, nullable=True, default=None)
    last_crawled = sa.Column(sa.DateTime, nullable=True, default=None)
    private = sa.Column(sa.Boolean(), default=False, nullable=False)
    internet2 = sa.Column(sa.Boolean(), default=False, nullable=False)
    internet2_clients = sa.Column(sa.Boolean(), default=False, nullable=False)
    asn = sa.Column(sa.Integer, default=None, nullable=True)
    asn_clients = sa.Column(sa.Boolean(), default=True, nullable=False)
    max_connections = sa.Column(sa.Integer, default=1, nullable=False)
    last_crawl_duration = sa.Column(sa.BigInteger, default=0, nullable=True)
    # Count of consecutive crawl failures; can be used to auto-disable a
    # host whose crawler keeps failing several times in a row.
    crawl_failures = deferred(sa.Column(sa.Integer, default=0, nullable=False))
    # Free text explaining why the mirror was disabled -- filled either by
    # the crawler (auto-disable) or by an admin (e.g. a ticket number).
    disable_reason = deferred(sa.Column(sa.Text(), nullable=True))
    # Reserved for SSH-based push mirroring, should it ever be implemented:
    # private key, destination host, and command to run on that host.
    push_ssh_private_key = deferred(sa.Column(sa.Text(), nullable=True))
    push_ssh_host = deferred(sa.Column(sa.Text(), nullable=True))
    push_ssh_command = deferred(sa.Column(sa.Text(), nullable=True))
    # Details about the last few crawls (protocols used, duration, ...).
    last_crawls = deferred(sa.Column(sa.PickleType(), nullable=True))

    # Relations
    site = relation(
        'Site',
        foreign_keys=[site_id],
        remote_side=[Site.id],
        backref=backref(
            'hosts',
            cascade="delete, delete-orphan",
            single_parent=True
        )
    )

    # Constraints
    __table_args__ = (
        sa.UniqueConstraint('site_id', 'name', name='host_idx'),
    )

    def __repr__(self):
        ''' Return a string representation of the object. '''
        return '<Host({0} - {1})>'.format(self.id, self.name)

    def __json__(self):
        ''' Return a serializable dict of the host's public fields. '''
        return {
            'id': self.id,
            'name': self.name,
            'site': {
                'id': self.site.id,
                'name': self.site.name,
            },
            'admin_active': self.admin_active,
            'user_active': self.user_active,
            'country': self.country,
            'bandwidth_int': self.bandwidth_int,
            'comment': self.comment,
            'last_checked_in': self.last_checked_in,
            'last_crawled': self.last_crawled,
            'private': self.private,
            'internet2': self.internet2,
            'internet2_clients': self.internet2_clients,
            'asn': self.asn,
            'asn_clients': self.asn_clients,
            'max_connections': self.max_connections,
            'last_crawl_duration': self.last_crawl_duration,
        }

    def set_not_up2date(self, session):
        ''' Mark every directory of every category on this host as out of
        date, then commit.
        '''
        for category in self.categories:
            for directory in category.directories:
                directory.up2date = False
        session.commit()

    def is_active(self):
        ''' A host is active only when it, its user flag, and its parent
        site's user flag are all active.
        '''
        return (self.admin_active
                and self.user_active
                and self.site.user_active)
def upgrade():
    """Create the sensor-monitoring schema: log lookups, sensors,
    settings, users, alert/audit logs, and temperature readings.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'logLevel',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('level', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(
        op.f('ix_logLevel_level'), 'logLevel', ['level'], unique=False)
    op.create_table(
        'logType',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('type', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(
        op.f('ix_logType_type'), 'logType', ['type'], unique=False)
    op.create_table(
        'sensor',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('model', sa.String(length=32), nullable=False),
        sa.Column('address', sa.Integer(), nullable=True),
        sa.Column('i2c_channel', sa.Integer(), nullable=True),
        sa.Column('isEnabled', sa.Integer(), nullable=True),
        sa.Column('isConnected', sa.Integer(), nullable=True),
        sa.Column('last_update', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'setting',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('type', sa.String(length=256), nullable=False),
        sa.Column('name', sa.String(length=256), nullable=False),
        sa.Column('value', sa.String(length=256), nullable=False),
        sa.Column('last_modified', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=64), nullable=False),
        sa.Column('email', sa.String(length=120), nullable=False),
        sa.Column('password_hash', sa.String(length=128), nullable=True),
        sa.Column('last_login', sa.DateTime(), nullable=True),
        sa.Column('token', sa.String(length=32), nullable=True),
        sa.Column('token_expiration', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
    op.create_index(op.f('ix_user_token'), 'user', ['token'], unique=True)
    op.create_index(
        op.f('ix_user_username'), 'user', ['username'], unique=True)
    op.create_table(
        'alert_log',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('timestamp_triggered', sa.DateTime(), nullable=False),
        sa.Column('threshold', sa.Integer(), nullable=False),
        sa.Column('value', sa.Float(), nullable=False),
        sa.Column('type', sa.String(), nullable=True),
        sa.Column('timestamp_cleared', sa.DateTime(), nullable=True),
        sa.Column('sensor_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['sensor_id'], ['sensor.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(
        op.f('ix_alert_log_timestamp_cleared'), 'alert_log',
        ['timestamp_cleared'], unique=False)
    op.create_index(
        op.f('ix_alert_log_timestamp_triggered'), 'alert_log',
        ['timestamp_triggered'], unique=False)
    op.create_index(
        op.f('ix_alert_log_type'), 'alert_log', ['type'], unique=False)
    op.create_table(
        'log',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('message', sa.String(length=256), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('logType_id', sa.Integer(), nullable=True),
        sa.Column('logLevel_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['logLevel_id'], ['logLevel.id'], ),
        sa.ForeignKeyConstraint(['logType_id'], ['logType.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(
        op.f('ix_log_timestamp'), 'log', ['timestamp'], unique=False)
    op.create_table(
        'reading_temperature',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('value_min', sa.Float(), nullable=False),
        sa.Column('value_max', sa.Float(), nullable=False),
        sa.Column('value_avg', sa.Float(), nullable=True),
        sa.Column('pixel_array', sa.PickleType(), nullable=True),
        sa.Column('sensor_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['sensor_id'], ['sensor.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(
        op.f('ix_reading_temperature_timestamp'), 'reading_temperature',
        ['timestamp'], unique=False)
    op.create_index(
        op.f('ix_reading_temperature_value_avg'), 'reading_temperature',
        ['value_avg'], unique=False)
    op.create_index(
        op.f('ix_reading_temperature_value_max'), 'reading_temperature',
        ['value_max'], unique=False)
    op.create_index(
        op.f('ix_reading_temperature_value_min'), 'reading_temperature',
        ['value_min'], unique=False)
class Directory(BASE):
    """A mirrored directory, identified by its full path.

    e.g. ``pub/epel`` or ``pub/fedora/linux``.
    """

    __tablename__ = 'directory'

    id = sa.Column(sa.Integer, primary_key=True)
    # Full path, e.g. pub/epel or pub/fedora/linux
    name = sa.Column(sa.Text(), nullable=False, unique=True)
    files = sa.Column(sa.PickleType(), nullable=True)
    readable = sa.Column(sa.Boolean(), default=True, nullable=False)
    ctime = sa.Column(sa.BigInteger, default=0, nullable=True)

    def __repr__(self):
        ''' Return a string representation of the object.
        '''
        return '<Directory(%s - %s)>' % (self.id, self.name)

    @classmethod
    def age_file_details(cls, session, config):
        ''' Prune old FileDetail rows: build the per-file cache, then
        delete stale entries according to the configured retention.
        '''
        cls._fill_file_details_cache(session, config)
        cls._age_file_details(session, config)

    @classmethod
    def _fill_file_details_cache(cls, session, config):
        ''' Populate ``cls.file_details_cache`` with a mapping of
        ``(directory_id, filename)`` to a list of detail entries sorted
        newest-first (the SQL orders by ``-timestamp``).
        '''
        cache = collections.defaultdict(list)
        sql = sa.text(
            'SELECT id, directory_id, filename, timestamp from file_detail '
            'ORDER BY directory_id, filename, -timestamp')
        results = session.execute(sql)
        # `detail_id` instead of `id` to avoid shadowing the builtin.
        for (detail_id, directory_id, filename, timestamp) in results:
            key = (directory_id, filename)
            cache[key].append(
                dict(file_detail_id=detail_id, timestamp=timestamp))
        cls.file_details_cache = cache

    @classmethod
    def _age_file_details(cls, session, config):
        """For each file, keep at least 1 FileDetail entry.

        Remove the second-most recent entry if the most recent entry is
        older than max_propogation_days.  This gives mirrors time to
        pick up the most recent change.  Remove any others that are more
        than max_stale_days old.
        """
        now = int(time.time())
        max_stale = config.get('mirrormanager.max_stale_days', 3)
        max_propagation = config.get('mirrormanager.max_propogation_days', 2)
        stale = now - (60 * 60 * 24 * max_stale)
        propagation = now - (60 * 60 * 24 * max_propagation)
        # FIX: dict.iteritems() is Python-2-only and raises AttributeError
        # on Python 3; items() behaves the same on both.
        for fds in cls.file_details_cache.values():
            if len(fds) > 1:
                start = 2
                # Include the second-most recent entry only if the most
                # recent one has had time to propagate to the mirrors.
                if fds[0]['timestamp'] < propagation:
                    start = 1
                # Delete all remaining entries that have gone stale.
                for f in fds[start:]:
                    if f['timestamp'] < stale:
                        detail = FileDetail.get(session, f['file_detail_id'])
                        session.delete(detail)
        session.commit()
def upgrade():
    """Create the scheduler metadata tables that do not exist yet.

    Every CREATE is guarded by an inspector lookup so the migration is
    idempotent against a partially initialized database.  On MySQL the
    datetime columns of task_state/task_execution are widened to
    microsecond precision (fsp=6).
    """
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)
    existing = inspector.get_table_names()

    if 'connection' not in existing:
        op.create_table(
            'connection',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('conn_id', sa.String(length=250), nullable=True),
            sa.Column('conn_type', sa.String(length=500), nullable=True),
            sa.Column('host', sa.String(length=500), nullable=True),
            sa.Column('schema', sa.String(length=500), nullable=True),
            sa.Column('login', sa.String(length=500), nullable=True),
            sa.Column('password', sa.String(length=500), nullable=True),
            sa.Column('port', sa.Integer(), nullable=True),
            sa.Column('extra', sa.String(length=5000), nullable=True),
            sa.PrimaryKeyConstraint('id'))

    if 'dag' not in existing:
        op.create_table(
            'dag',
            sa.Column('dag_id', sa.String(length=250), nullable=False),
            sa.Column('is_paused', sa.Boolean(), nullable=True),
            sa.Column('is_subdag', sa.Boolean(), nullable=True),
            sa.Column('is_active', sa.Boolean(), nullable=True),
            sa.Column('last_scheduler_run', sa.DateTime(), nullable=True),
            sa.Column('last_pickled', sa.DateTime(), nullable=True),
            sa.Column('last_expired', sa.DateTime(), nullable=True),
            sa.Column('scheduler_lock', sa.Boolean(), nullable=True),
            sa.Column('pickle_id', sa.Integer(), nullable=True),
            sa.Column('fileloc', sa.String(length=2000), nullable=True),
            sa.Column('owners', sa.String(length=2000), nullable=True),
            sa.PrimaryKeyConstraint('dag_id'))

    if 'dag_pickle' not in existing:
        op.create_table(
            'dag_pickle',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('pickle', sa.PickleType(), nullable=True),
            sa.Column('created_dttm', sa.DateTime(), nullable=True),
            sa.Column('pickle_hash', sa.BigInteger(), nullable=True),
            sa.PrimaryKeyConstraint('id'))

    if 'import_error' not in existing:
        op.create_table(
            'import_error',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('timestamp', sa.DateTime(), nullable=True),
            sa.Column('filename', sa.String(length=1024), nullable=True),
            sa.Column('stacktrace', sa.Text(), nullable=True),
            sa.PrimaryKeyConstraint('id'))

    if 'job' not in existing:
        op.create_table(
            'job',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('dag_id', sa.String(length=250), nullable=True),
            sa.Column('state', sa.String(length=20), nullable=True),
            sa.Column('job_type', sa.String(length=30), nullable=True),
            sa.Column('start_date', sa.DateTime(), nullable=True),
            sa.Column('end_date', sa.DateTime(), nullable=True),
            sa.Column('latest_heartbeat', sa.DateTime(), nullable=True),
            sa.Column('executor_class', sa.String(length=500), nullable=True),
            sa.Column('hostname', sa.String(length=500), nullable=True),
            sa.Column('unixname', sa.String(length=1000), nullable=True),
            sa.PrimaryKeyConstraint('id'))
        op.create_index(
            'job_type_heart', 'job', ['job_type', 'latest_heartbeat'],
            unique=False)

    if 'known_event_type' not in existing:
        op.create_table(
            'known_event_type',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('know_event_type', sa.String(length=200), nullable=True),
            sa.PrimaryKeyConstraint('id'))

    if 'log' not in existing:
        op.create_table(
            'log',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('dttm', sa.DateTime(), nullable=True),
            sa.Column('dag_id', sa.String(length=250), nullable=True),
            sa.Column('task_id', sa.String(length=250), nullable=True),
            sa.Column('event', sa.String(length=30), nullable=True),
            sa.Column('execution_date', sa.DateTime(), nullable=True),
            sa.Column('owner', sa.String(length=500), nullable=True),
            sa.PrimaryKeyConstraint('id'))

    if 'sla_miss' not in existing:
        op.create_table(
            'sla_miss',
            sa.Column('task_id', sa.String(length=250), nullable=False),
            sa.Column('dag_id', sa.String(length=250), nullable=False),
            sa.Column('execution_date', sa.DateTime(), nullable=False),
            sa.Column('email_sent', sa.Boolean(), nullable=True),
            sa.Column('timestamp', sa.DateTime(), nullable=True),
            sa.Column('description', sa.Text(), nullable=True),
            sa.PrimaryKeyConstraint('task_id', 'dag_id', 'execution_date'))

    if 'slot_pool' not in existing:
        op.create_table(
            'slot_pool',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('pool', sa.String(length=50), nullable=True),
            sa.Column('slots', sa.Integer(), nullable=True),
            sa.Column('description', sa.Text(), nullable=True),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('pool'))

    if 'task_instance' not in existing:
        op.create_table(
            'task_instance',
            sa.Column('task_id', sa.String(length=250), nullable=False),
            sa.Column('dag_id', sa.String(length=250), nullable=False),
            sa.Column('execution_date', sa.DateTime(), nullable=False),
            sa.Column('start_date', sa.DateTime(), nullable=True),
            sa.Column('end_date', sa.DateTime(), nullable=True),
            sa.Column('duration', sa.Integer(), nullable=True),
            sa.Column('state', sa.String(length=20), nullable=True),
            sa.Column('try_number', sa.Integer(), nullable=True),
            sa.Column('hostname', sa.String(length=1000), nullable=True),
            sa.Column('unixname', sa.String(length=1000), nullable=True),
            sa.Column('job_id', sa.Integer(), nullable=True),
            sa.Column('pool', sa.String(length=50), nullable=True),
            sa.Column('queue', sa.String(length=50), nullable=True),
            sa.Column('priority_weight', sa.Integer(), nullable=True),
            sa.PrimaryKeyConstraint('task_id', 'dag_id', 'execution_date'))
        op.create_index(
            'ti_dag_state', 'task_instance', ['dag_id', 'state'],
            unique=False)
        op.create_index(
            'ti_pool', 'task_instance', ['pool', 'state', 'priority_weight'],
            unique=False)
        op.create_index(
            'ti_state_lkp', 'task_instance',
            ['dag_id', 'task_id', 'execution_date', 'state'], unique=False)

    if 'user' not in existing:
        op.create_table(
            'user',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('username', sa.String(length=250), nullable=True),
            sa.Column('email', sa.String(length=500), nullable=True),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('username'))

    if 'variable' not in existing:
        op.create_table(
            'variable',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('key', sa.String(length=250), nullable=True),
            sa.Column('val', sa.Text(), nullable=True),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('key'))

    if 'chart' not in existing:
        op.create_table(
            'chart',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('label', sa.String(length=200), nullable=True),
            sa.Column('conn_id', sa.String(length=250), nullable=False),
            sa.Column('user_id', sa.Integer(), nullable=True),
            sa.Column('chart_type', sa.String(length=100), nullable=True),
            sa.Column('sql_layout', sa.String(length=50), nullable=True),
            sa.Column('sql', sa.Text(), nullable=True),
            sa.Column('y_log_scale', sa.Boolean(), nullable=True),
            sa.Column('show_datatable', sa.Boolean(), nullable=True),
            sa.Column('show_sql', sa.Boolean(), nullable=True),
            sa.Column('height', sa.Integer(), nullable=True),
            sa.Column('default_params', sa.String(length=5000), nullable=True),
            sa.Column('x_is_date', sa.Boolean(), nullable=True),
            sa.Column('iteration_no', sa.Integer(), nullable=True),
            sa.Column('last_modified', sa.DateTime(), nullable=True),
            sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
            sa.PrimaryKeyConstraint('id'))

    if 'known_event' not in existing:
        op.create_table(
            'known_event',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('label', sa.String(length=200), nullable=True),
            sa.Column('start_date', sa.DateTime(), nullable=True),
            sa.Column('end_date', sa.DateTime(), nullable=True),
            sa.Column('user_id', sa.Integer(), nullable=True),
            sa.Column('known_event_type_id', sa.Integer(), nullable=True),
            sa.Column('description', sa.Text(), nullable=True),
            sa.ForeignKeyConstraint(
                ['known_event_type_id'], ['known_event_type.id'], ),
            sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
            sa.PrimaryKeyConstraint('id'))

    if 'xcom' not in existing:
        op.create_table(
            'xcom',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('key', sa.String(length=512), nullable=True),
            sa.Column('value', sa.PickleType(), nullable=True),
            sa.Column(
                'timestamp', sa.DateTime(), default=func.now(),
                nullable=False),
            sa.Column('execution_date', sa.DateTime(), nullable=False),
            sa.Column('task_id', sa.String(length=250), nullable=False),
            sa.Column('dag_id', sa.String(length=250), nullable=False),
            sa.PrimaryKeyConstraint('id'))

    if 'task_state' not in existing:
        op.create_table(
            'task_state',
            sa.Column('task_id', sa.String(length=250), nullable=False),
            sa.Column('dag_id', sa.String(length=250), nullable=False),
            sa.Column('execution_date', sa.DateTime(), nullable=False),
            sa.Column('task_state', sa.PickleType(), nullable=True),
            sa.Column('event_handler', sa.PickleType(), nullable=True),
            sa.Column('action', sa.String(length=32), nullable=True),
            sa.Column('ack_id', sa.Integer(), nullable=True),
            sa.PrimaryKeyConstraint('task_id', 'dag_id', 'execution_date'))

    if 'task_execution' not in existing:
        op.create_table(
            'task_execution',
            sa.Column('task_id', sa.String(length=250), nullable=False),
            sa.Column('dag_id', sa.String(length=250), nullable=False),
            sa.Column('execution_date', sa.DateTime(), nullable=False),
            sa.Column('seq_num', sa.Integer(), nullable=False),
            sa.Column('start_date', sa.DateTime(), nullable=True),
            sa.Column('end_date', sa.DateTime(), nullable=True),
            sa.Column('duration', sa.Integer(), nullable=True),
            sa.Column('try_number', sa.Integer(), nullable=True),
            sa.Column('hostname', sa.String(length=1000), nullable=True),
            sa.Column('job_id', sa.Integer(), nullable=True),
            sa.Column('pool', sa.String(length=50), nullable=True),
            sa.Column('queue', sa.String(length=50), nullable=True),
            sa.Column('pool_slots', sa.Integer, default=1),
            sa.Column('pid', sa.Integer(), nullable=True),
            sa.Column('queued_dttm', sa.DateTime(), nullable=True),
            sa.PrimaryKeyConstraint(
                'task_id', 'dag_id', 'execution_date', 'seq_num'))

    if conn.dialect.name == "mysql":
        # MySQL DATETIME defaults to second precision; fsp=6 keeps
        # microseconds on the execution-tracking columns.
        from sqlalchemy.dialects import mysql
        with op.batch_alter_table('task_state') as state_op:
            state_op.alter_column(
                column_name='execution_date',
                type_=mysql.DATETIME(fsp=6),
                nullable=False)
        with op.batch_alter_table('task_execution') as exec_op:
            exec_op.alter_column(
                column_name='execution_date',
                type_=mysql.DATETIME(fsp=6),
                nullable=False)
            exec_op.alter_column(
                column_name='start_date', type_=mysql.DATETIME(fsp=6))
            exec_op.alter_column(
                column_name='end_date', type_=mysql.DATETIME(fsp=6))
            exec_op.alter_column(
                column_name='queued_dttm', type_=mysql.DATETIME(fsp=6))
def upgrade():
    """Create the employee time-tracking schema: departments, server
    status, users, employees (with face-recognition encodings), and
    work shifts.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'department',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=120), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(
        op.f('ix_department_id'), 'department', ['id'], unique=True)
    op.create_index(
        op.f('ix_department_name'), 'department', ['name'], unique=True)
    op.create_table(
        'server_status',
        sa.Column('date', sa.Date(), nullable=False),
        sa.Column('url', sa.String(), nullable=True),
        sa.Column('status', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('date'))
    op.create_index(
        op.f('ix_server_status_date'), 'server_status', ['date'],
        unique=False)
    op.create_index(
        op.f('ix_server_status_url'), 'server_status', ['url'], unique=False)
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('username', sa.String(length=64), nullable=True),
        sa.Column('email', sa.String(length=120), nullable=True),
        sa.Column('password_hash', sa.String(length=128), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
    op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=True)
    op.create_index(
        op.f('ix_user_username'), 'user', ['username'], unique=True)
    op.create_table(
        'employees',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('first_name', sa.String(length=30), nullable=True),
        sa.Column('last_name', sa.String(length=30), nullable=True),
        sa.Column('service_number', sa.String(length=30), nullable=True),
        sa.Column('department_id', sa.Integer(), nullable=True),
        sa.Column('encodings', sa.PickleType(), nullable=True),
        sa.Column('photo', sa.String(length=128), nullable=True),
        sa.ForeignKeyConstraint(['department_id'], ['department.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('encodings'),
        sa.UniqueConstraint('id'))
    op.create_index(
        op.f('ix_employees_first_name'), 'employees', ['first_name'],
        unique=False)
    op.create_index(
        op.f('ix_employees_last_name'), 'employees', ['last_name'],
        unique=False)
    op.create_index(
        op.f('ix_employees_service_number'), 'employees', ['service_number'],
        unique=True)
    op.create_table(
        'work_shift',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('employee_id', sa.Integer(), nullable=True),
        sa.Column('arrival_time', sa.DateTime(), nullable=True),
        sa.Column('depature_time', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['employee_id'], ['employees.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(
        op.f('ix_work_shift_arrival_time'), 'work_shift', ['arrival_time'],
        unique=False)
    op.create_index(
        op.f('ix_work_shift_depature_time'), 'work_shift', ['depature_time'],
        unique=False)
    op.create_index(op.f('ix_work_shift_id'), 'work_shift', ['id'], unique=True)