class Role(db.Model, RoleMixin):
    """Flask-Security role: short unique name plus free-text description."""

    id = db.Column(postgresql.INTEGER, primary_key=True)
    name = db.Column(postgresql.VARCHAR(12), unique=True)
    description = db.Column(postgresql.VARCHAR(255))

    def __repr__(self):
        return f'<Role {self.name!r}>'
class Author(Base):
    """An author row: required name and age, optional e-mail."""

    __tablename__ = 'author'

    # Surrogate integer key.
    id = sa.Column(postgresql_types.INTEGER(), autoincrement=True,
                   nullable=False, unique=True, primary_key=True)
    name = sa.Column(postgresql_types.VARCHAR(length=255),
                     nullable=False, unique=False, primary_key=False)
    age = sa.Column(postgresql_types.SMALLINT(),
                    nullable=False, unique=False, primary_key=False)
    # 254 chars — presumably sized to the common e-mail length limit; confirm.
    email = sa.Column(postgresql_types.VARCHAR(length=254),
                      nullable=True, unique=False, primary_key=False)
class Permission(Base):
    """Django auth_permission row, tied to a ContentType."""

    __tablename__ = 'auth_permission'

    id = sa.Column(postgresql_types.INTEGER(), autoincrement=True,
                   nullable=False, unique=True, primary_key=True)
    name = sa.Column(postgresql_types.VARCHAR(length=255),
                     nullable=False, unique=False, primary_key=False)
    content_type_id = sa.Column(
        postgresql_types.INTEGER(),
        sa.ForeignKey(column="django_content_type.id", ondelete="CASCADE"),
        nullable=False, unique=False, primary_key=False,
        autoincrement=True,  # NOTE(review): odd on a FK column — confirm intent
    )
    codename = sa.Column(postgresql_types.VARCHAR(length=100),
                         nullable=False, unique=False, primary_key=False)

    content_type = sa.orm.relationship(
        'ContentType',
        foreign_keys="[auth_permission.c.content_type_id]",
        remote_side=None,
    )
class Parameters(Base):
    """Key/value parameter attached to a configuration row."""

    __tablename__ = 'parameters'

    param_id = Column(BigInteger, Sequence('param_id_seq'),
                      primary_key=True, nullable=False)
    config_id = Column(Integer, ForeignKey('configurations.config_id'))
    name = Column(postgresql.VARCHAR(64), nullable=False)
    value = Column(postgresql.VARCHAR(64), nullable=False)

    def __init__(self, name='any', value=0):
        # Both columns are VARCHAR, so coerce whatever is passed to str.
        self.name = str(name)
        self.value = str(value)

    def create(self, param=None):
        """Persist one Parameters row per (name, value) pair in *param*.

        BUG FIX: the default used to be the mutable literal ``{'any': 1}``,
        which Python evaluates once and shares across every call; a fresh
        dict is now built per call (same effective default).

        Returns the list of newly created Parameters objects.
        """
        if param is None:
            param = {'any': 1}
        params = [Parameters(k, v) for k, v in param.items()]
        session.add_all(params)
        session.commit()
        return params

    @staticmethod
    def getParams(config_id):
        """Return {name: value} for every parameter of *config_id* (via pandas)."""
        with engine.connect() as conn:
            _df = pd.read_sql_query(
                select([Parameters]).where(
                    Parameters.config_id == int(config_id)),
                conn)
        return dict(zip(_df['name'], _df['value']))
class Options(Base):
    """Key/value option attachable to a target, data product, job or event."""

    __tablename__ = 'options'

    opt_id = Column(BigInteger, Sequence('opt_id_seq'),
                    primary_key=True, nullable=False)
    target_id = Column(Integer, ForeignKey('targets.target_id'))
    dp_id = Column(Integer, ForeignKey('data_products.dp_id'))
    job_id = Column(Integer, ForeignKey('jobs.job_id'))
    event_id = Column(Integer, ForeignKey('events.event_id'))
    name = Column(postgresql.VARCHAR(64), nullable=False)
    value = Column(postgresql.VARCHAR(64), nullable=False)

    def __init__(self, name='any', value=0):
        # Both columns are VARCHAR, so coerce whatever is passed to str.
        self.name = str(name)
        self.value = str(value)

    def create(self, opt=None):
        """Persist one Options row per (name, value) pair in *opt*.

        BUG FIX: the default used to be the mutable literal ``{'any': 1}``,
        shared across calls; a fresh dict is now built per call (same
        effective default).

        Returns the list of newly created Options objects.
        """
        if opt is None:
            opt = {'any': 1}
        opts = [Options(k, v) for k, v in opt.items()]
        session.add_all(opts)
        session.commit()
        return opts
def downgrade():
    """Restore the login-tracking columns on bvp_users."""
    # ### commands auto generated by Alembic - please adjust! ###
    restored = (
        sa.Column("current_login_at", postgresql.TIMESTAMP(),
                  autoincrement=False, nullable=True),
        sa.Column("last_login_ip", postgresql.VARCHAR(100),
                  autoincrement=False, nullable=True),
        sa.Column("current_login_ip", postgresql.VARCHAR(100),
                  autoincrement=False, nullable=True),
        sa.Column("confirmed_at", postgresql.TIMESTAMP(),
                  autoincrement=False, nullable=True),
    )
    for column in restored:
        op.add_column("bvp_users", column)
class Mask(BaseMixin, Base):
    """A named (source, name, value) mask attached to a task."""

    __tablename__ = 'masks'

    mask_id = Column(Integer, Sequence('mask_id_seq'),
                     primary_key=True, nullable=False)
    task_id = Column(Integer, ForeignKey('tasks.task_id'))
    source = Column(postgresql.VARCHAR(64), nullable=False)
    name = Column(postgresql.VARCHAR(64), nullable=False)
    value = Column(postgresql.VARCHAR(64), nullable=False)
    timestamp = Column(DateTime, default=func.now())

    def __init__(self, source='', name='', value=''):
        self.source = str(source)
        self.name = str(name)
        self.value = str(value)

    @staticmethod
    def get(mask_id, how='sql'):
        """Fetch one mask by id via the ORM ('sql') or pandas ('pd').

        Any other *how* leaves ``rs`` unbound and raises (preserved behavior).
        """
        if how == 'sql':
            rs = session.query(Mask).get(int(mask_id))
        elif how == 'pd':
            with engine.connect() as conn:
                rs = pd.read_sql_query(
                    select([Mask]).where(Mask.mask_id == int(mask_id)),
                    conn)
            rs = rs.iloc[0]
        return rs
class ContentType(Base):
    """Django django_content_type row (app_label + model name)."""

    __tablename__ = 'django_content_type'

    id = sa.Column(postgresql_types.INTEGER(), autoincrement=True,
                   nullable=False, unique=True, primary_key=True,
                   doc="testtest")
    app_label = sa.Column(postgresql_types.VARCHAR(length=100),
                          nullable=False, unique=False, primary_key=False,
                          doc="testtest")
    model = sa.Column(postgresql_types.VARCHAR(length=100),
                      nullable=False, unique=False, primary_key=False,
                      doc="testtest")
class Target(BaseMixin, Base):
    """A named data target inside a pipeline, with its own directory path."""

    __tablename__ = 'targets'

    target_id = Column(Integer, Sequence('target_id_seq'),
                       primary_key=True, nullable=False)
    name = Column(postgresql.VARCHAR(64), nullable=False)
    pipeline_id = Column(Integer, ForeignKey('pipelines.pipeline_id'))
    configurations = relationship(
        'Configuration',
        backref=backref('targets', uselist=True, passive_updates=False,
                        cascade='delete,all'))
    options = relationship(
        'Options',
        backref=backref('targets', uselist=True, passive_updates=False,
                        cascade='delete,all'))
    relativepath = Column(postgresql.VARCHAR(256))
    timestamp = Column(DateTime, default=func.now())

    def __init__(self, name='any'):
        self.name = str(name)

    def add_config(self, obj, create_dir=False):
        """Attach a Configuration and let it build its paths under this target."""
        self.configurations.append(obj)
        obj.add_paths(self.target_id, create_dir)
        session.commit()
        return

    def add_options(self, obj):
        """Attach an iterable of Options rows and commit."""
        for opt in obj:
            self.options.append(opt)
        session.commit()
        return

    def add_paths(self, pipeline_id, create_dir=False):
        """Derive relativepath from the pipeline's data root; optionally mkdir."""
        pipeline = Pipeline.get(int(pipeline_id))
        self.relativepath = str(pipeline.data_root) + '/' + str(self.name)
        if create_dir:
            # Return code is not checked; directory creation is best-effort.
            _t = subprocess.run(['mkdir', '-p', str(self.relativepath)],
                                stdout=subprocess.PIPE)
        return

    @staticmethod
    def get(target_id, how='sql'):
        """Fetch one target by id via the ORM ('sql') or pandas ('pd')."""
        if how == 'sql':
            rs = session.query(Target).get(int(target_id))
        elif how == 'pd':
            with engine.connect() as conn:
                rs = pd.read_sql_query(
                    select([Target]).where(Target.target_id == int(target_id)),
                    conn)
            rs = rs.iloc[0]
        return rs
class ArtifactSetMember(Base):
    """Join row linking an artifact set to one artifact (composite PK)."""

    __tablename__ = 'artifact_set_members'

    # FK to artifact_sets.set_id was commented out in the original —
    # the constraint is deliberately not enforced here.
    set_id = sa.Column(pg.VARCHAR(SHA1_LENGTH), primary_key=True)
    # FK to artifacts.id likewise left unenforced.
    artifact_id = sa.Column(pg.VARCHAR(SHA1_LENGTH), primary_key=True)
class Countries(db.Model):
    """Country lookup table referenced by Applicants."""

    __tablename__ = 'Countries'

    country_id = Column('COUNTRY_ID', Integer, primary_key=True)
    # NOTE(review): Integer only fits the numeric ISO 3166-1 code, not the
    # alpha-2/alpha-3 string codes — confirm this is intended.
    country_iso_code = Column('COUNTRY_ISO_CODE', Integer)
    country_name = Column('COUNTRY_NAME', postgresql.VARCHAR(100))
    country_state_name = Column('COUNTRY_STATE_NAME', postgresql.VARCHAR(150))
    country_phone_code = Column('COUNTRY_PHONE_CODE', postgresql.VARCHAR(8))

    applicants = relationship('Applicants', backref='country')
class Applicants(db.Model):
    """An applicant's personal, contact and login details."""

    __tablename__ = "Applicants"

    appl_id = Column('APPL_ID', Integer, primary_key=True)
    appl_created = Column('APPL_CREATED', DateTime,
                          default=datetime.datetime.utcnow)
    appl_first_name = Column('APPL_FIRST_NAME', postgresql.VARCHAR(50))
    appl_middle_name = Column('APPL_MIDDLE_NAME', postgresql.VARCHAR(50),
                              nullable=True)
    appl_last_name = Column('APPL_LAST_NAME', postgresql.VARCHAR(100))
    appl_father_name = Column('APPL_FATHER_NAME', postgresql.VARCHAR(50))
    # NOTE(review): the raw SQL references APPL_GENDER unquoted; PostgreSQL
    # folds unquoted identifiers to lowercase while the column name is
    # upper-case — verify the constraint compiles against the live schema.
    appl_gender = Column('APPL_GENDER', postgresql.CHAR(1),
                         CheckConstraint("APPL_GENDER in ('M', 'F')"))
    appl_date_of_birth = Column('APPL_DATE_OF_BIRTH', postgresql.DATE)
    appl_phone_num = Column('APPL_PHONE_NUM', postgresql.VARCHAR(10))
    appl_email = Column('APPL_EMAIL', postgresql.VARCHAR(100))
    appl_city = Column('APPL_CITY', postgresql.VARCHAR(100))
    country_ID = Column('COUNTRY_ID', Integer,
                        ForeignKey('Countries.COUNTRY_ID'))
    appl_username = Column('APPL_USERNAME', postgresql.VARCHAR(100))
    appl_password = Column('APPL_PASSWORD', postgresql.VARCHAR(500))  # TODO: decide hash algo

    def __str__(self):
        return "<{}|{}:{}>".format(self.appl_id, self.appl_username,
                                   self.appl_email)
class Book(Base):
    """Book row: JSONB price, binary content, tag array, author FK, M2M categories."""

    __tablename__ = 'book'

    id = sa.Column(
        postgresql_types.UUID(),
        default=GET_DEFAULT('books.models.Book.id'),
        primary_key=True, unique=True, nullable=False,
    )
    price = sa.Column(postgresql_types.JSONB(),
                      primary_key=False, unique=False, nullable=False)
    title = sa.Column(postgresql_types.VARCHAR(length=255),
                      primary_key=False, unique=False, nullable=False)
    description = sa.Column(postgresql_types.TEXT(),
                            primary_key=False, unique=False, nullable=True)
    author_id = sa.Column(
        postgresql_types.INTEGER(),
        # BUG FIX: PostgreSQL spells this referential action "SET NULL";
        # the previous "SET_NULL" emits invalid DDL at CREATE TABLE time.
        sa.ForeignKey(column="author.id", ondelete="SET NULL"),
        primary_key=False, unique=False, nullable=True,
        autoincrement=True,  # NOTE(review): odd on a FK column — confirm intent
    )
    content = sa.Column(postgresql_types.BYTEA(),
                        primary_key=False, unique=False, nullable=False)
    tags = sa.Column(
        postgresql_types.ARRAY(item_type=postgresql_types.VARCHAR, dimensions=1),
        primary_key=False, unique=False, nullable=False,
    )

    author = sa.orm.relationship(
        'Author',
        foreign_keys="[book.c.author_id]",
        remote_side=None,
    )
    category = sa.orm.relationship(
        'Category',
        secondary="book_category",
        foreign_keys="[book_category.c.book_id, book_category.c.category_id]",
        remote_side=None,
    )
def downgrade():
    """Revert to the single-client schema: re-add Projects.client_id and
    drop the Project_Clients association table, after migrating data."""
    op.execute('ALTER TABLE "Tasks" DROP CONSTRAINT "Tasks_good_id_fkey"')
    # Relax the gap columns back to nullable.
    op.alter_column(u'Task_Dependencies', 'gap_unit',
                    existing_type=postgresql.VARCHAR(length=256),
                    nullable=True)
    op.alter_column(u'Task_Dependencies', 'gap_timing',
                    existing_type=postgresql.DOUBLE_PRECISION(precision=53),
                    nullable=True)
    op.add_column(
        u'Projects',
        sa.Column('client_id', sa.INTEGER(), autoincrement=False,
                  nullable=True))
    op.create_foreign_key(u'Projects_client_id_fkey', 'Projects', 'Clients',
                          ['client_id'], ['id'])
    # before dropping the Project_Clients, add the first client as the
    # Project.client_id
    op.execute("""
        update "Projects"
        set client_id = (
            select client_id from "Project_Clients"
            where project_id = "Projects".id
            limit 1
        )
    """)
    op.drop_table('Project_Clients')
class Group(Base):
    """Django auth_group row with its permissions M2M."""

    __tablename__ = 'auth_group'

    id = sa.Column(postgresql_types.INTEGER(), autoincrement=True,
                   nullable=False, unique=True, primary_key=True,
                   doc="testtest")
    name = sa.Column(postgresql_types.VARCHAR(length=150),
                     nullable=False, unique=True, primary_key=False,
                     doc="testtest")

    permissions = sa.orm.relationship(
        'Permission',
        secondary="auth_group_permissions",
        foreign_keys="[auth_group_permissions.c.group_id, auth_group_permissions.c.permission_id]",
        remote_side=None,
        backref="group",
    )
class ArtifactSet(Base):
    """A labelled collection of artifacts, identified by a SHA1 set_id."""

    __tablename__ = 'artifact_sets'

    id = sa.Column(pg.INTEGER, primary_key=True)
    set_id = sa.Column(pg.VARCHAR(SHA1_LENGTH))
    labels = sa.Column(pg.JSONB)
    created_at = sa.Column(pg.TIMESTAMP, default=datetime.utcnow)

    def __init__(self, artifact_set):
        self.set_id = artifact_set.id
        # Normalise a bare string label into {'name': ...} so the JSONB
        # column always holds a mapping.
        labels = artifact_set.labels
        if isinstance(labels, str):
            labels = {'name': labels}
        self.labels = labels
        self.created_at = artifact_set.created_at

    @memoized_property
    def props(self):
        """Plain-dict view of this row."""
        return {
            'id': self.set_id,
            'labels': self.labels,
            'created_at': self.created_at,
        }

    def __repr__(self):
        return '<ArtifactSet %r, %r>' % (self.set_id, self.labels)
class User(BaseMixin, Base):
    """A pipeline owner; users hold one or more pipelines."""

    __tablename__ = 'users'

    user_id = Column(Integer, Sequence('user_id_seq'),
                     primary_key=True, nullable=False)
    name = Column(postgresql.VARCHAR(32), nullable=False)
    pipelines = relationship(
        'Pipeline',
        backref=backref('users', uselist=True, passive_updates=False,
                        cascade='delete,all'))
    timestamp = Column(DateTime, default=func.now())

    def __init__(self, name='any'):
        self.name = str(name)

    def add_pipeline(self, obj):
        """Attach one Pipeline and commit."""
        self.pipelines.append(obj)
        session.commit()
        return

    @staticmethod
    def get(user_name, how='sql'):
        """Fetch one user by *name* via the ORM ('sql') or pandas ('pd')."""
        if how == 'sql':
            rs = session.query(User).filter_by(name=str(user_name)).one()
        elif how == 'pd':
            with engine.connect() as conn:
                rs = pd.read_sql_query(
                    select([User]).where(User.name == str(user_name)),
                    conn)
            rs = rs.iloc[0]
        return rs
def get_account_status_table():
    """Build and return the (unbound) account_status table definition."""
    metadata = MetaData()
    columns = [
        Column('uname', pgsql.VARCHAR(255)),
        Column('is_login', pgsql.BOOLEAN, nullable=False),
        Column('last_login_time', pgsql.TIME, nullable=False),
        Column('chost', pgsql.ARRAY(pgsql.BIGINT), nullable=False),
    ]
    return Table('account_status', metadata, *columns)
def get_account_bind_table(name):
    """Build the account-bind table called *name*; return (table, metadata)."""
    metadata = MetaData()
    columns = [
        Column('uuid', pgsql.VARCHAR(24), nullable=False, primary_key=True),
        Column('pwd', pgsql.BYTEA),
        Column('reg_time', pgsql.TIME, nullable=False),
    ]
    table = Table(name, metadata, *columns)
    return (table, metadata)
class Job(BaseMixin, Base):
    """One execution of a task under a configuration, with state and timing."""

    __tablename__ = 'jobs'

    job_id = Column(BigInteger, Sequence('job_id_seq'),
                    primary_key=True, nullable=False)
    task_id = Column(Integer, ForeignKey('tasks.task_id'))
    config_id = Column(Integer, ForeignKey('configurations.config_id'))
    options = relationship(
        'Options',
        backref=backref('jobs', uselist=True, passive_updates=False,
                        cascade='delete,all'))
    events = relationship('Event', secondary='job_event_link')
    nodes = relationship('Node', secondary='job_node_link')
    state = Column(postgresql.VARCHAR(64), nullable=False)
    starttime = Column(DateTime, default=func.now())
    endtime = Column(DateTime, default=func.now())
    timestamp = Column(DateTime, default=func.now())

    def __init__(self, state='new'):
        self.state = str(state)

    def add_options(self, obj):
        """Attach an iterable of Options rows and commit."""
        for opt in obj:
            self.options.append(opt)
        session.commit()
        return

    def add_event(self, obj):
        """Attach one Event and commit."""
        self.events.append(obj)
        session.commit()
        return

    def add_node(self, obj):
        """Attach one Node and commit."""
        self.nodes.append(obj)
        session.commit()
        return

    @staticmethod
    def get(job_id, how='sql'):
        """Fetch one job by id via the ORM ('sql') or pandas ('pd')."""
        if how == 'sql':
            rs = session.query(Job).get(int(job_id))
        elif how == 'pd':
            with engine.connect() as conn:
                rs = pd.read_sql_query(
                    select([Job]).where(Job.job_id == int(job_id)),
                    conn)
            rs = rs.iloc[0]
        return rs
def get_account_table():
    """Build and return the (unbound) account table definition."""
    metadata = MetaData()
    # NOTE(review): '%08x' % 0 is always '00000000' — looks like a sharding
    # placeholder; confirm whether the shard index should be a parameter.
    account = Table(
        '%08x' % 0,
        metadata,
        Column('uuid', pgsql.UUID, primary_key=True),
        Column('uname', pgsql.VARCHAR(255), primary_key=True),
        Column('pwd', pgsql.BYTEA),
        Column('is_active', pgsql.BOOLEAN, nullable=False),
        Column('reg_time', pgsql.TIME, nullable=False),
    )
    return account
class Task(BaseMixin, Base):
    """A pipeline task with masks, jobs and scheduling metadata."""

    __tablename__ = 'tasks'

    task_id = Column(Integer, Sequence('task_id_seq'),
                     primary_key=True, nullable=False)
    name = Column(postgresql.VARCHAR(64), nullable=False)
    pipeline_id = Column(Integer, ForeignKey('pipelines.pipeline_id'))
    masks = relationship(
        'Mask',
        backref=backref('tasks', uselist=True, passive_updates=False,
                        cascade='delete,all'))
    jobs = relationship(
        'Job',
        backref=backref('tasks', uselist=True, passive_updates=False,
                        cascade='delete,all'))
    nruns = Column(Float)
    runtime = Column(Float)
    is_exclusive = Column(Boolean)
    timestamp = Column(DateTime, default=func.now())

    def __init__(self, name='', nruns=0, run_time=0, is_exclusive=0):
        self.name = str(name)
        self.nruns = int(nruns)
        # BUG FIX: this used to assign to ``self.run_time`` — a plain
        # attribute that does not exist on the model — so the mapped
        # ``runtime`` column was never populated.
        self.runtime = float(run_time)
        self.is_exclusive = bool(is_exclusive)

    def add_mask(self, obj):
        """Attach one Mask and commit."""
        self.masks.append(obj)
        session.commit()
        return

    def add_job(self, obj):
        """Attach one Job and commit."""
        self.jobs.append(obj)
        session.commit()
        return

    @staticmethod
    def get(task_id, how='sql'):
        """Fetch one task by id via the ORM ('sql') or pandas ('pd')."""
        if how == 'sql':
            rs = session.query(Task).get(int(task_id))
        elif how == 'pd':
            with engine.connect() as conn:
                rs = pd.read_sql_query(
                    select([Task]).where(Task.task_id == int(task_id)),
                    conn)
            rs = rs.iloc[0]
        return rs
class Run(Base):
    """One recorded run: SHA1 id, host, raw info blob, and its artifacts."""

    __tablename__ = 'runs'

    id = sa.Column(pg.VARCHAR(SHA1_LENGTH), primary_key=True)
    hostname = sa.Column(pg.VARCHAR(256))
    info = sa.Column(pg.JSONB)
    created_at = sa.Column(pg.TIMESTAMP, default=datetime.utcnow)

    artifacts = sqlalchemy.orm.relationship('Artifact')

    def __init__(self, info):
        self.id = info['id']
        self.info = info
        self.hostname = info['host']['nodename']
        self.created_at = info['created_at']

    @memoized_property
    def info_with_datetimes(self):
        """The info dict with 'created_at' replaced by the column value."""
        result = copy.copy(self.info)
        result['created_at'] = self.created_at
        return result
def upgrade():
    """Create the provenance schema: artifact_set_members, artifact_sets,
    runs and artifacts tables."""
    # Set membership join table: composite PK, no FK constraints.
    op.create_table(
        'artifact_set_members',
        sa.Column('set_id', sa.VARCHAR(length=40), nullable=False),
        sa.Column('artifact_id', sa.VARCHAR(length=40), nullable=False),
        sa.PrimaryKeyConstraint('set_id', 'artifact_id'),
    )
    op.create_table(
        'artifact_sets',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('set_id', sa.VARCHAR(length=40), nullable=True),
        sa.Column('name', sa.VARCHAR(length=1000), nullable=True),
        sa.Column('created_at', pg.TIMESTAMP(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'runs',
        sa.Column('id', sa.VARCHAR(length=40), nullable=False),
        sa.Column('hostname', sa.VARCHAR(length=256), nullable=True),
        sa.Column('info', pg.JSONB(), nullable=True),
        sa.Column('created_at', pg.TIMESTAMP(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # Artifacts carry provenance (fn_module/fn_name, inputs), timing
    # metrics, and serializer settings; run_id references runs.id.
    op.create_table(
        'artifacts',
        sa.Column('id', sa.VARCHAR(length=40), nullable=False),
        sa.Column('value_id', sa.VARCHAR(length=50), nullable=True),
        sa.Column('run_id', sa.VARCHAR(length=40), nullable=True),
        sa.Column('name', sa.VARCHAR(length=1000), nullable=True),
        sa.Column('version', sa.INTEGER(), nullable=True),
        sa.Column('fn_module', sa.VARCHAR(length=100), nullable=True),
        sa.Column('fn_name', sa.VARCHAR(length=100), nullable=True),
        sa.Column('composite', sa.BOOLEAN(), nullable=True),
        sa.Column('value_id_duration', sa.FLOAT(), nullable=True),
        sa.Column('compute_duration', sa.FLOAT(), nullable=True),
        sa.Column('hash_duration', sa.FLOAT(), nullable=True),
        sa.Column('computed_at', pg.TIMESTAMP(), nullable=True),
        sa.Column('added_at', pg.TIMESTAMP(), nullable=True),
        sa.Column('input_artifact_ids', pg.ARRAY(pg.VARCHAR(length=40)),
                  nullable=True),
        sa.Column('inputs_json', pg.JSONB(), nullable=True),
        sa.Column('serializer', sa.VARCHAR(length=128), nullable=True),
        sa.Column('load_kwargs', pg.JSONB(), nullable=True),
        sa.Column('dump_kwargs', pg.JSONB(), nullable=True),
        sa.Column('custom_fields', pg.JSONB(), nullable=True),
        sa.ForeignKeyConstraint(
            ['run_id'],
            ['runs.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
    )
class Event(BaseMixin, Base):
    """A named event with job arguments, linkable to many jobs."""

    __tablename__ = 'events'

    event_id = Column(BigInteger, Sequence('event_id_seq'),
                      primary_key=True, nullable=False)
    jobs = relationship('Job', secondary='job_event_link')
    jargs = Column(postgresql.VARCHAR(64), nullable=False)
    name = Column(postgresql.VARCHAR(64), nullable=False)
    value = Column(postgresql.VARCHAR(64), nullable=False)
    options = relationship(
        'Options',
        backref=backref('events', uselist=True, passive_updates=False,
                        cascade='delete,all'))
    timestamp = Column(DateTime, default=func.now())

    def __init__(self, name='any', value='', jargs=''):
        self.jargs = str(jargs)
        self.name = str(name)
        self.value = str(value)

    def add_job(self, obj):
        """Attach one Job and commit."""
        self.jobs.append(obj)
        session.commit()
        return

    @staticmethod
    def get(event_id, how='sql'):
        """Fetch one event by id via the ORM ('sql') or pandas ('pd')."""
        if how == 'sql':
            rs = session.query(Event).get(int(event_id))
        elif how == 'pd':
            with engine.connect() as conn:
                rs = pd.read_sql_query(
                    select([Event]).where(Event.event_id == int(event_id)),
                    conn)
            rs = rs.iloc[0]
        return rs
class Pipeline(BaseMixin, Base):
    """A user-owned pipeline: root paths plus its targets and tasks."""

    __tablename__ = 'pipelines'

    pipeline_id = Column(Integer, Sequence('pipeline_id_seq'),
                         primary_key=True, nullable=False)
    name = Column(postgresql.VARCHAR(64), nullable=False)
    user_id = Column(Integer, ForeignKey('users.user_id'))
    targets = relationship(
        'Target',
        backref=backref('pipelines', uselist=True, passive_updates=False,
                        cascade='delete,all'))
    tasks = relationship(
        'Task',
        backref=backref('pipelines', uselist=True, passive_updates=False,
                        cascade='delete,all'))
    software_root = Column(postgresql.VARCHAR(256))
    data_root = Column(postgresql.VARCHAR(256))
    pipe_root = Column(postgresql.VARCHAR(256))
    config_root = Column(postgresql.VARCHAR(256))
    description = Column(postgresql.VARCHAR(512))
    timestamp = Column(DateTime, default=func.now())

    def __init__(self, name='any', software_root='', data_root='',
                 pipe_root='', config_root='', description=''):
        self.name = str(name)
        self.software_root = str(software_root)
        self.data_root = str(data_root)
        self.pipe_root = str(pipe_root)
        self.config_root = str(config_root)
        self.description = str(description)

    def add_target(self, obj, create_dir=False):
        """Attach a Target and let it derive its paths from this pipeline."""
        self.targets.append(obj)
        obj.add_paths(self.pipeline_id, create_dir)
        session.commit()
        return

    def add_task(self, obj):
        """Attach one Task and commit."""
        self.tasks.append(obj)
        session.commit()
        return

    @staticmethod
    def get(pipeline_id, how='sql'):
        """Fetch one pipeline by id via the ORM ('sql') or pandas ('pd')."""
        if how == 'sql':
            rs = session.query(Pipeline).get(int(pipeline_id))
        elif how == 'pd':
            with engine.connect() as conn:
                rs = pd.read_sql_query(
                    select([Pipeline]).where(
                        Pipeline.pipeline_id == int(pipeline_id)),
                    conn)
            rs = rs.iloc[0]
        return rs
class ArtifactSet(Base):
    """A named collection of artifacts, identified by a SHA1 set_id."""

    __tablename__ = 'artifact_sets'

    id = sa.Column(pg.INTEGER, primary_key=True)
    set_id = sa.Column(pg.VARCHAR(SHA1_LENGTH))
    name = sa.Column(pg.VARCHAR(1000))
    created_at = sa.Column(pg.TIMESTAMP, default=datetime.utcnow)

    def __init__(self, artifact_set):
        self.set_id = artifact_set.id
        self.name = artifact_set.name
        self.created_at = artifact_set.created_at

    @memoized_property
    def props(self):
        """Plain-dict view of this row."""
        return {
            'id': self.set_id,
            'name': self.name,
            'created_at': self.created_at,
        }

    def __repr__(self):
        return '<ArtifactSet %r, %r>' % (self.set_id, self.name)
class CategoryRelation(Base):
    """Typed edge between two Category rows (self-referential M2M)."""

    __tablename__ = 'category_relation'

    id = sa.Column(postgresql_types.INTEGER(), autoincrement=True,
                   nullable=False, unique=True, primary_key=True,
                   doc="testtest")
    category1_id = sa.Column(
        postgresql_types.INTEGER(),
        sa.ForeignKey(column="category.id", ondelete="CASCADE"),
        nullable=False, unique=False, primary_key=False,
        autoincrement=True, doc="testtest",
    )
    category2_id = sa.Column(
        postgresql_types.INTEGER(),
        sa.ForeignKey(column="category.id", ondelete="CASCADE"),
        nullable=False, unique=False, primary_key=False,
        autoincrement=True, doc="testtest",
    )
    type = sa.Column(postgresql_types.VARCHAR(length=30),
                     nullable=True, unique=False, primary_key=False,
                     doc="testtest")

    category1 = sa.orm.relationship(
        'Category',
        foreign_keys="[category_relation.c.category1_id]",
        remote_side=None,
        backref="parents",
    )
    category2 = sa.orm.relationship(
        'Category',
        foreign_keys="[category_relation.c.category2_id]",
        remote_side=None,
        backref="children",
    )
def upgrade():
    """Drop tags_messages and widen the discriminator columns to VARCHAR(60)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('tags_messages')
    widened = (
        ('evenements', 'type'),
        ('messages', 'type'),
        ('user_evenement_roles', 'type'),
        ('messages_reactions', 'type'),
        ('locations', 'type'),
        ('groups', 'type'),
        ('positions_groups', 'group_type'),
    )
    for table_name, column_name in widened:
        op.alter_column(table_name, column_name,
                        type_=postgresql.VARCHAR(60))
class Session(Base):
    """Django django_session row: key, pickled/encoded payload, expiry."""

    __tablename__ = 'django_session'

    session_key = sa.Column(postgresql_types.VARCHAR(length=40),
                            nullable=False, unique=True, primary_key=True)
    session_data = sa.Column(postgresql_types.TEXT(),
                             nullable=False, unique=False, primary_key=False)
    expire_date = sa.Column(postgresql_types.TIMESTAMP(),
                            nullable=False, unique=False, primary_key=False)