class Task(Base):
    """A to-do item assigned by one user to another (table ``tasks``)."""

    # Seems unnecessary, but doing this makes the timestamp mockable.
    # NOTE(review): the Column default below captures this plain function at
    # class-creation time, so patching Task.get_datetime later will NOT change
    # the default -- confirm the mocking strategy actually works.
    def get_datetime(self):
        return datetime.now()

    __tablename__ = 'tasks'

    id = Column(Integer, primary_key=True)
    assigner_id = Column(String(length=40), nullable=False, index=True)  # user who created the task
    assignee_id = Column(String(length=40), nullable=False, index=True)  # user the task is assigned to
    description = Column(String, nullable=False)
    # SQLAlchemy invokes a one-argument default callable with the execution
    # context, which lands in ``self`` here; the context is unused.
    created_timestamp = Column(TIMESTAMP(timezone=True), default=get_datetime)

    # Extra descending index on assignee_id (in addition to the plain
    # index=True column index above).
    __table_args__ = (Index('assignee_id', assignee_id.desc()), )

    def __repr__(self):
        # NOTE(review): '%-d' (day without zero padding) is a glibc strftime
        # extension; it fails on Windows.
        return f"{self.description} (from <@{self.assigner_id}> on {self.created_timestamp.strftime('%b %-d')})"

    def __str__(self):
        return self.__repr__()
class Expense(db.Model):
    """A shared expense paid by one user and split among group members."""

    __tablename__ = 'expense'

    expense_id = Column(Integer, primary_key=True, autoincrement=True)
    title = Column(String(50))
    total = Column(REAL, nullable=False)             # total amount paid
    payer = Column(Integer, nullable=False)          # FK -> user.user_id (see __table_args__)
    group_id = Column(Integer)                       # FK -> group.group_id; nullable for non-group expenses
    split = Column(Enum(SplitType), nullable=False)  # how the total is divided among users
    # NOTE(review): datetime.utcnow returns a *naive* datetime although the
    # column is timezone-aware, and utcnow is deprecated since Python 3.12 --
    # consider `lambda: datetime.now(timezone.utc)` instead.
    timestamp = Column(TIMESTAMP(timezone=True), nullable=False, default=datetime.utcnow)

    __table_args__ = (ForeignKeyConstraint(['group_id'], ['group.group_id']),
                      ForeignKeyConstraint(['payer'], ['user.user_id']))

    # Per-user shares of this expense; deleting the expense deletes its shares.
    users = relationship("ExpenseShare", back_populates="expense", cascade="delete")
class Article(Base): __tablename__ = 'articles_with_extra' uuid = Column(UUID(as_uuid=True), primary_key=True, default=uuid4) date = Column(TIMESTAMP(timezone=True), nullable=False, index=True) title = Column(String, nullable=False) content = Column(String, nullable=False) extra = Column(MutableDict.as_mutable(JSONB), nullable=False, default={}) __table_args__ = ( Index('ix_article_with_extra_extra_slug', extra['slug'].astext), Index('ix_article_with_extra_extra_tag', extra['tags'], postgresql_using='gin' ), ) def __repr__(self): return f"""# {self.title}
class Columns(Base):
    """Metadata for a crawled column/blog feed (table ``Columns``)."""

    __tablename__ = 'Columns'

    # NOTE(review): both RecId and Columnid carry primary_key=True, which
    # declares a *composite* primary key -- confirm that is intended rather
    # than RecId alone.
    RecId = Column(Integer, autoincrement=True, nullable=False, primary_key=True)
    Columnid = Column(String(64), index=True, primary_key=True, unique=True)
    ColumnName = Column(String(64), index=True)
    Permission = Column(String(32))
    ColumnAuthor = Column(String(128))
    Introduction = Column(String(1024))
    HeadImg = Column(String(512))      # header image URL
    FollowedCount = Column(Integer)
    ArticleCount = Column(Integer)
    Url = Column(String(512))
    UpdateTime = Column(DateTime)
    CreateTime = Column(TIMESTAMP(True), nullable=False)  # TIMESTAMP(True) == timezone-aware
    LastModifyTime = Column(DateTime)
    pass
class Hold(Base, TimestampsMixin):  # type: ignore
    """A user's hold on a quantity of tickets for an event (table ``holds``)."""

    __tablename__ = "holds"

    id = Column(Integer, primary_key=True)
    quantity = Column(Integer, nullable=False)
    # When the hold expires; naive timestamp, nullable for open-ended holds.
    ends_at = Column(TIMESTAMP(timezone=False), nullable=True, index=True)

    user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
    user = relationship("User", back_populates="holds")

    event_id = Column(Integer, ForeignKey("events.id"), nullable=False, index=True)
    event = relationship("Event", back_populates="holds")

    # At most one hold per (user, event) pair.
    __table_args__ = (UniqueConstraint("user_id", "event_id"),)

    def __repr__(self):
        # NOTE: accessing self.event here may trigger a lazy load from the DB.
        return "<Hold id={} event_name={} qty={}>".format(
            self.id, self.event.name, self.quantity
        )
class Suspended(Base):
    """Records that a user has suspended a fact (hidden it from study)."""

    # NOTE(review): no __tablename__ is declared; this only works if Base
    # derives table names automatically -- confirm.
    id = Column(Integer, primary_key=True, index=True)
    fact_id = Column(Integer, ForeignKey("fact.fact_id"), nullable=False)
    user_id = Column(Integer, ForeignKey("user.id"), nullable=False)
    date_suspended = Column(TIMESTAMP(timezone=True), nullable=False)

    # Deleting a user or a fact removes the related suspension rows.
    suspender = relationship("User", backref=backref("suspensions", cascade="all, delete-orphan"))
    suspended_fact = relationship("Fact", backref=backref(
        "suspensions", cascade="all, delete-orphan"))

    def __init__(self, suspender: User, suspended_fact: Fact, date_suspended: datetime):
        """Record that *suspender* suspended *suspended_fact* at *date_suspended*."""
        self.suspended_fact = suspended_fact
        self.suspender = suspender
        self.date_suspended = date_suspended
class Record(Base):
    """An ODP record.

    This model represents a uniquely identifiable digital object
    and its associated metadata.
    """

    __tablename__ = 'record'

    __table_args__ = (
        # schema_id/schema_type form a composite FK into the schema table.
        ForeignKeyConstraint(
            ('schema_id', 'schema_type'),
            ('schema.id', 'schema.type'),
            name='record_schema_fkey',
            ondelete='RESTRICT',
        ),
        # Only metadata-type schemas may be referenced by a record.
        CheckConstraint(
            f"schema_type = '{SchemaType.metadata}'",
            name='record_schema_type_check',
        ),
        # Every record must be identified by a DOI and/or an SID.
        CheckConstraint(
            'doi IS NOT NULL OR sid IS NOT NULL',
            name='record_doi_sid_check',
        ),
    )

    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))
    doi = Column(String, unique=True)  # digital object identifier
    sid = Column(String, unique=True)  # secondary identifier
    # Trailing underscore avoids clashing with SQLAlchemy's reserved `metadata`.
    metadata_ = Column(JSONB, nullable=False)
    validity = Column(JSONB, nullable=False)
    timestamp = Column(TIMESTAMP(timezone=True), nullable=False)

    collection_id = Column(String, ForeignKey('collection.id', onupdate='CASCADE', ondelete='RESTRICT'), nullable=False)
    collection = relationship('Collection')

    schema_id = Column(String, nullable=False)
    schema_type = Column(Enum(SchemaType), nullable=False)
    schema = relationship('Schema')

    # view of associated tags (one-to-many)
    tags = relationship('RecordTag', viewonly=True)

    # Attributes shown in the repr -- presumably consumed by a Base helper.
    _repr_ = 'id', 'doi', 'sid', 'collection_id', 'schema_id'
class Headline(Base):
    """A news headline enriched with NLP annotations (table ``headlines``)."""

    __tablename__ = 'headlines'

    article_id = Column(String, primary_key=True)
    t = Column(TIMESTAMP(timezone=True), nullable=False)  # publication time
    headline = Column(String, nullable=False)
    isins = Column(postgresql.ARRAY(String), nullable=True, comment='International Securities Identification Number')
    countries = Column(postgresql.ARRAY(String), nullable=True)
    categories = Column(postgresql.ARRAY(String), nullable=True)
    # NOTE(review): the __init__ hints below type these two as List[str] while
    # the columns are plain String -- confirm which is intended.
    keywords_headline = Column(String, nullable=True)
    keywords_article = Column(String, nullable=True)
    simple_headline = Column(String, nullable=True)
    tokens = Column(postgresql.ARRAY(String), nullable=True)
    tag_tokens = Column(postgresql.ARRAY(String), nullable=True)
    dictionary = Column(postgresql.UUID(as_uuid=True), nullable=True)
    is_used = Column(Boolean, nullable=True)
    phase = Column(String, nullable=True)  # processing phase marker

    def __init__(self, article_id: str, t: datetime, headline: str, isins: List[str],
                 countries: List[str], categories: List[str],
                 keywords_headline: List[str], keywords_article: List[str],
                 simple_headline: str, tokens: List[str], tag_tokens: List[str],
                 dictionary: str, is_used: bool, phase: str):
        """Populate every column explicitly from the given values."""
        self.article_id = article_id
        self.t = t
        self.headline = headline
        self.isins = isins
        self.countries = countries
        self.categories = categories
        self.keywords_headline = keywords_headline
        self.keywords_article = keywords_article
        self.simple_headline = simple_headline
        self.tokens = tokens
        self.tag_tokens = tag_tokens
        self.dictionary = dictionary
        self.is_used = is_used
        self.phase = phase
def __init__(self):
    """Load best LightGBM iterations from the DB and re-run training for them."""
    # SQL column -> SQLAlchemy type mapping used when writing results back.
    self.types = {
        'gvkey': INTEGER(),
        'datacqtr': TIMESTAMP(),
        'actual': BIGINT(),
        'lightgbm_result': BIGINT(),
        'y_type': TEXT(),
        'qcut': BIGINT()
    }
    # NOTE(review): relies on module-level globals `args`, `sql_result` and
    # `space` -- confirm they are initialized before this constructor runs.
    for qcut in [3]:  # , 6, 9
        self.db_max = self.best_iteration(y_type=args.y_type, qcut=qcut)
        for i in [39]:  # range(len(self.db_max))
            # Seed the shared result/search-space dicts from the stored best row.
            sql_result.update(self.db_max.iloc[i, :].to_dict())
            # presumably hyperparameters start at column 6 -- TODO confirm
            space.update(self.db_max.iloc[i, 6:].to_dict())
            space.update({'num_class': qcut, 'is_unbalance': True})
            print('sql_result1: ', sql_result)
            self.step_load_data()
            self.step_lightgbm()
class Reported(Base):
    """A user's report of a fact, optionally carrying a suggested correction."""

    # NOTE(review): no __tablename__ is declared; this only works if Base
    # derives table names automatically -- confirm.
    id = Column(Integer, primary_key=True, index=True)
    fact_id = Column(Integer, ForeignKey("fact.fact_id"), nullable=False)
    user_id = Column(Integer, ForeignKey("user.id"), nullable=False)
    date_reported = Column(TIMESTAMP(timezone=True), nullable=False)
    suggestion = Column(JSONB)  # serialized FactToReport payload

    # Deleting a user or a fact removes the related report rows.
    reporter = relationship("User", backref=backref("reporteds", cascade="all, delete-orphan"))
    reported_fact = relationship("Fact", backref=backref("reporteds", cascade="all, delete-orphan"))

    def __init__(self, reporter: User, reported_fact: Fact, date_reported: datetime,
                 suggestion: FactToReport):
        """Record the report; the suggestion model is stored as plain JSON."""
        self.reported_fact = reported_fact
        self.reporter = reporter
        self.date_reported = date_reported
        self.suggestion = suggestion.dict()
class Stock(Base):
    """
    Declarative base class 'Stock' to map to database table 'stocks'

    One row per trading day of OHLCV data plus derived change metrics.
    """
    __tablename__ = 'stocks'

    # BUG FIX: was `default=datetime.now()`, which evaluated ONCE at import
    # time and stamped every row with the same moment. Passing the callable
    # makes SQLAlchemy evaluate it per INSERT.
    added = Column(TIMESTAMP(timezone=False), nullable=False, default=datetime.now)
    date = Column(Date, primary_key=True)      # the trading day
    open = Column(DECIMAL(12, 2))
    close = Column(DECIMAL(12, 2))
    high = Column(DECIMAL(12, 2))
    low = Column(DECIMAL(12, 2))
    volume = Column(Integer)
    change = Column(DECIMAL(5, 2))             # absolute day-over-day change
    changePercent = Column(DECIMAL(9, 6))
    label = Column(String(length=10))          # human-readable date label
    changeOverTime = Column(DECIMAL(9, 8))
class RedisServer(Base):
    """A registered Redis server instance (table ``redisServer``)."""

    # table name
    __tablename__ = 'redisServer'

    # table schema
    # NOTE(review): this column is spelled 'severId' but __init__/__repr__ use
    # 'serverId'; as written, __init__ never populates the primary-key column.
    # Confirm which spelling the real schema uses.
    severId = Column(Integer, autoincrement=True, primary_key=True)
    serverIp = Column(String(16), nullable=False)
    createdBy = Column(String(64), nullable=False)
    dbId = Column(Integer, nullable=False)
    isDefault = Column(Boolean, nullable=False)
    createdTime = Column(TIMESTAMP(True), nullable=False)

    # Query property bound to the global scoped session.
    query = db_session.query_property()

    def __init__(self, serverId, serverIp, serverPort, dbId, isDefault, createdBy):
        # NOTE(review): serverId and serverPort have no matching Column above,
        # so these assignments create plain, non-persisted attributes.
        self.serverId = serverId
        self.serverIp = serverIp
        self.serverPort = serverPort
        self.dbId = dbId
        self.isDefault = isDefault
        self.createdBy = createdBy

    def __repr__(self):
        server_dict = {
            u"serverId": int(self.serverId),
            u"serverIp": self.serverIp,
            u"serverPort": self.serverPort,
            u"dbId": int(self.dbId)
        }
        return str(server_dict)

    def __dir__(self):
        # NOTE(review): __dir__ conventionally returns a list of attribute
        # names; returning a data dict here is unusual -- confirm callers.
        server_dict = {
            u"serverId": int(self.serverId),
            u"serverIp": self.serverIp,
            u"serverPort": self.serverPort,
            u"dbId": int(self.dbId)
        }
        return server_dict
def __init__(
    self,
    core,
    *,
    beacon: NodeT = None,
    loop: asyncio.AbstractEventLoop = None,
    binding_keys: list = None,
    configure_rpc: bool = False,
) -> None:
    """Initialize the SQL behaviour: DB handles plus the json_data table.

    Args:
        core: owning core/service object, forwarded to the base behaviour.
        beacon: optional supervision-tree node.
        loop: event loop to bind to.
        binding_keys: message binding keys this behaviour subscribes to.
        configure_rpc: whether the base class should set up RPC plumbing.
    """
    super(SqlBehav, self).__init__(
        core,
        beacon=beacon,
        loop=loop,
        binding_keys=binding_keys,
        configure_rpc=configure_rpc,
    )
    # Populated on connect; None until then.
    self.db: Optional[Database] = None
    self.engine: Optional[Engine] = None
    self.metadata: Optional[MetaData] = None
    # TODO: Generalize example
    self.msg_types: Dict[str, Type[SerializableObject]] = {
        DemoData.__name__: DemoData
    }
    # ATTENTION: keep in sync with model.json_data definition !!!
    # allow already serialized json to be inserted in JSON column as text
    # as class variable overwrites tests while loading (startup-time vs. runtime)
    # NOTE(review): `metadata` below is a module-level MetaData, not
    # self.metadata (which is still None at this point) -- confirm intended.
    self.json_data = Table(
        "json_data",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("ts", TIMESTAMP(timezone=True)),
        Column("sender", String(length=256)),
        Column("rmq_type", String(length=100)),
        Column("content_type", String(length=100)),
        Column("routing_key", String(length=256)),
        Column("data", Text),
        extend_existing=True,  # allow redefinition to JSON column to send json string
    )
class Liability(Base): __tablename__ = "liabilities" # seq = Sequence('id', start=190) id = Column(Integer, autoincrement=True, primary_key=True) address = Column(String, unique=True) model = Column(String) model_data = Column(String) objective = Column(String) objective_data = Column(String) result = Column(String) result_data = Column(String) promisee = Column(String) promisor = Column(String) lighthouse = Column(String) # lighthouseFee = Column(String) token = Column(String) cost = Column(String) validator = Column(String) validatorFee = Column(String) # isSuccess = Column(Boolean) # isFinalized = Column(Boolean) timestamp = Column(TIMESTAMP(True), server_default=func.now()) def __repr__(self): return json.dumps({ "id": self.id, "address": self.address, "model": self.model, "objective": self.objective, "result": self.result, "promisee": self.promisee, "promisor": self.promisor, "lighthouse": self.lighthouse, "token": self.token, "cost": self.cost, "validator": self.validator, "validatorFee": self.validatorFee, })
class Community(Base):
    """A residential community scraped from a listings site (table ``community``)."""

    __tablename__ = "community"

    id = Column(String(36), primary_key=True)  # UUID-sized key
    source = Column(String(10))                # which site the record came from
    title = Column(String(64))
    internal_id = Column(String(64))           # the source site's own identifier
    district = Column(String(10))
    address = Column(String(64))
    total_buildings = Column(String(64))
    total_houses = Column(String(64))
    build_type = Column(String(64))
    build_time = Column(String(64))
    developer = Column(String(64))
    # NOTE: shadows the builtin `property` inside this class body.
    property = Column(String(64))
    property_fee = Column(String(64))
    parking_num = Column(String(64))
    green_rate = Column(String(10))
    plot_rate = Column(String(10))
    lat = Column(DECIMAL(32, 20))
    lng = Column(DECIMAL(32, 20))
    update_time = Column(TIMESTAMP(), default=func.now())  # DB NOW() evaluated at insert
class WithoutKnowledgeDocument(Base):
    """A crawled document not associated with any knowledge entry."""

    __tablename__ = 'crawler_withoutknowledgedocument'

    id = Column(Integer, Sequence('crawler_withoutknowledgedocument_id_seq'), primary_key=True)
    url = Column(String, nullable=False)
    title = Column(String(80), nullable=False)
    content = Column(String, nullable=False)
    # BUG FIX: was `default=datetime.now()`, which evaluated ONCE at import
    # time, so every row shared the same timestamp. Pass the callable so it is
    # evaluated per INSERT.
    # NOTE(review): datetime.now is naive while the column is timezone-aware;
    # consider a tz-aware callable -- confirm against the DB session timezone.
    created_date = Column(TIMESTAMP(timezone=True), nullable=False, default=datetime.now)

    def __init__(self, url, title, content):
        """Create a document from crawl output; id and created_date are defaulted."""
        self.url = url
        self.title = title
        self.content = content

    def __repr__(self):
        return "<withoutknowledgedocument('%s', '%s', '%s', '%s')>" % (
            self.id, self.url, self.title, self.content)
class InventorySvc(Base):
    """A network service discovered on an inventory host (table ``inventory_svcs``)."""

    __tablename__ = 'inventory_svcs'

    id = Column(Integer, Sequence('inventory_svcs_id_seq'), primary_key=True, nullable=False)

    """Relation to inventory hosts"""
    host_id = Column(Integer, ForeignKey('inventory_hosts.id'))
    host = relationship('InventoryHost', backref='inventory_svcs', order_by=id)

    protocol = Column(Text)     # presumably 'tcp'/'udp' -- confirm with scanner output
    portid = Column(Integer)
    name = Column(Text)
    svc_product = Column(Text)
    extra_info = Column(Text)

    """Relation to tie products to inventory services"""
    product_id = Column(Integer, ForeignKey('products.id'))
    product = relationship('Product', backref='inventory_svcs', order_by=id)

    # Row creation time; _get_date is called per INSERT.
    created_at = Column(TIMESTAMP(timezone=False), default=_get_date)
class Message(Base):
    """A direct message between two users (table ``messages``)."""

    __tablename__ = 'messages'

    id = Column(Integer, primary_key=True)
    sender_id = Column(Integer, ForeignKey(User.user_id))
    receiver_id = Column(Integer, ForeignKey(User.user_id))
    message = Column(String)
    # BUG FIX: was `default=utc.localize(dt.now())`, which evaluated ONCE at
    # import time so every message carried the process start time. Wrapping in
    # a lambda makes the timestamp be computed per INSERT.
    sent_time = Column(TIMESTAMP(timezone=True), default=lambda: utc.localize(dt.now()))
    read = Column(Boolean, nullable=False, server_default='false', default=False)

    def to_json(self):
        """Return a JSON-serializable dict view of this message.

        The 'reciever' key spelling is kept for backward compatibility with
        existing API consumers.
        """
        return {
            'sender': self.sender_id,
            'reciever': self.receiver_id,
            'message': self.message,
            # BUG FIX: json.dumps(datetime) raises TypeError; emit an
            # ISO-8601 string instead (None when unset).
            'sent_time': self.sent_time.isoformat() if self.sent_time is not None else None,
        }
class CatalogRecord(Base):
    """Model of a many-to-many catalog-record association, representing the
    state of a record with respect to a public catalog."""

    __tablename__ = 'catalog_record'

    # Composite primary key; rows are removed with their catalog or record.
    catalog_id = Column(String, ForeignKey('catalog.id', ondelete='CASCADE'), primary_key=True)
    record_id = Column(String, ForeignKey('record.id', ondelete='CASCADE'), primary_key=True)

    catalog = relationship('Catalog', viewonly=True)
    record = relationship('Record', viewonly=True)

    timestamp = Column(TIMESTAMP(timezone=True), nullable=False)  # when this state was evaluated
    validity = Column(JSONB, nullable=False)
    published = Column(Boolean, nullable=False)
    # Snapshot of the record as published; presumably null when unpublished.
    published_record = Column(JSONB)
class PredictionModelRunTimestamp(Base):
    """ Identify which prediction model run (e.g. 2020 07 07 12:00)."""

    __tablename__ = 'prediction_model_run_timestamps'
    # A given model can only have one run per timestamp.
    __table_args__ = (UniqueConstraint(
        'prediction_model_id', 'prediction_run_timestamp'), {
            'comment': 'Identify which prediction model run (e.g. 2020 07 07 12:00).'
        })

    id = Column(Integer, Sequence('prediction_model_run_timestamps_id_seq'),
                primary_key=True, nullable=False, index=True)
    # Is it GPDS or RDPS?
    prediction_model_id = Column(Integer, ForeignKey('prediction_models.id'), nullable=False)
    prediction_model = relationship("PredictionModel")
    # The date and time of the model run.
    prediction_run_timestamp = Column(TIMESTAMP(timezone=True), nullable=False)
class GoodsModel(Base):
    """A deal/product item scraped from a shopping site (table ``t_goods``)."""

    __tablename__ = 't_goods'

    goods_id = Column(Integer, primary_key=True)  # product id
    brand = Column(String(64))                    # brand
    category = Column(String(64))                 # category
    price = Column(Integer)                       # price
    name = Column(String(512))                    # product name
    url = Column(String(160))                     # image URL
    visible_price = Column(String(64))            # displayed price text
    worth = Column(Integer)                       # "worth it" votes
    worthless = Column(Integer)                   # "not worth it" votes
    time = Column(DateTime)                       # publish time
    # Last-modified time, maintained by the database on insert and update.
    update_time = Column(TIMESTAMP(True), nullable=False, server_default=func.now(),
                         onupdate=func.now())

    def __init__(self, **items):
        """Assign only the keyword arguments matching declared attributes; ignore the rest."""
        for key in items:
            if hasattr(self, key):
                setattr(self, key, items[key])
class ScraperResult(DatabaseModel):
    """Model for the result from a scraper processing a url."""

    __tablename__ = "scraper_results"

    result_id: int = Column(Integer, primary_key=True)
    url: str = Column(Text, nullable=False, index=True)
    scraper_type: ScraperType = Column(ENUM(ScraperType), nullable=False)
    # Set by the database at insert time.
    scrape_time: datetime = Column(
        TIMESTAMP(timezone=True),
        nullable=False,
        index=True,
        server_default=text("NOW()"),
    )
    # Scraper-specific payload; Python None is stored as SQL NULL, not JSON null.
    data: Any = Column(JSONB(none_as_null=True))

    def __init__(self, url: str, scraper_type: ScraperType, data: Any):
        """Create a new ScraperResult."""
        self.url = url
        self.scraper_type = scraper_type
        self.data = data
class TrainingSet(Base):
    """sets of document for train and test models"""

    __tablename__ = 'trainingsets'

    set_id = Column(UUIDType(binary=False), server_default=text("uuid_generate_v4()"),
                    primary_key=True)
    # human-readable name
    name = Column(String(511))
    # creating date
    set_created = Column(TIMESTAMP(), server_default=functions.current_timestamp())
    # number of documents in set
    doc_num = Column(Integer())
    # id of all documents in set
    # NOTE(review): a ForeignKey inside ARRAY is not enforced by PostgreSQL --
    # confirm the referential integrity expectation here.
    doc_ids = Column(ARRAY(UUIDType(binary=False), ForeignKey('documents.doc_id')))
    # inverse doument frequency for all lemmas of set
    idf = Column(JSONB())
    # index of documents in object_features: key - doc_id, value - row number
    doc_index = Column(JSONB())
    # index of lemmas in object_features: key - lemma, value - column number
    lemma_index = Column(JSONB())
    # object-features matrix
    object_features = Column(ARRAY(item_type=Float, dimensions=2))
def test_reflect_dates(self, metadata, connection):
    """Round-trip date/time types through Oracle table reflection.

    Creates a table mixing generic and Oracle-specific date types, reflects
    it back from the connection, and checks each column's reflected type.
    """
    Table(
        "date_types",
        metadata,
        Column("d1", sqltypes.DATE),
        Column("d2", oracle.DATE),
        Column("d3", TIMESTAMP),
        Column("d4", TIMESTAMP(timezone=True)),
        Column("d5", oracle.INTERVAL(second_precision=5)),
    )
    metadata.create_all(connection)
    m = MetaData()
    t1 = Table("date_types", m, autoload_with=connection)
    # Generic DATE reflects to the Oracle dialect's DATE, a DateTime subclass.
    assert isinstance(t1.c.d1.type, oracle.DATE)
    assert isinstance(t1.c.d1.type, DateTime)
    assert isinstance(t1.c.d2.type, oracle.DATE)
    assert isinstance(t1.c.d2.type, DateTime)
    # The timezone flag must survive reflection in both directions.
    assert isinstance(t1.c.d3.type, TIMESTAMP)
    assert not t1.c.d3.type.timezone
    assert isinstance(t1.c.d4.type, TIMESTAMP)
    assert t1.c.d4.type.timezone
    assert isinstance(t1.c.d5.type, oracle.INTERVAL)
class User(Base):
    """An application user account (table ``user``)."""

    __tablename__ = 'user'

    id = Column(Integer, primary_key=True, autoincrement=True)
    username = Column(String(32), nullable=False)
    nickname = Column(String(32))
    password = Column(String(256), nullable=False)  # presumably a hash -- confirm at call sites
    icon = Column(String(1024))     # avatar URL
    sign = Column(String(1024))     # signature line
    email = Column(String(32))
    birth = Column(String(16))
    mobile = Column(String(32))
    gender = Column(Integer, default=0)
    # BUG FIX: was String(10), which cannot hold a full IPv4 address (up to 15
    # chars), let alone IPv6 (up to 45). Widened to 45; existing data still fits.
    last_ip_addr = Column(String(45), nullable=False)
    create_time = Column(TIMESTAMP(True), nullable=False, server_default=text('NOW()'))
    last_login_time = Column(String(256))
    discription = Column(String(256))

    def __repr__(self):
        # BUG FIX: the old format string had one '%s' placeholder but passed
        # two arguments (including the raw password), which raised TypeError
        # on every repr() call. The password is deliberately masked.
        return "<User(name='%s', password='******')>" % (self.username,)
class TopicVote(DatabaseModel):
    """Model for a user's vote on a topic.

    Trigger behavior:
      Outgoing:
        - Inserting or deleting a row will increment or decrement the num_votes
          column for the relevant topic.
    """

    __tablename__ = 'topic_votes'

    # Composite primary key: one vote per (user, topic) pair.
    user_id: int = Column(
        Integer,
        ForeignKey('users.user_id'),
        nullable=False,
        primary_key=True,
    )
    topic_id: int = Column(
        Integer,
        ForeignKey('topics.topic_id'),
        nullable=False,
        primary_key=True,
    )
    # Set by the database at insert time.
    created_time: datetime = Column(
        TIMESTAMP(timezone=True),
        nullable=False,
        index=True,
        server_default=text('NOW()'),
    )

    user: User = relationship('User', innerjoin=True)
    topic: Topic = relationship('Topic', innerjoin=True)

    def __init__(self, user: User, topic: Topic) -> None:
        """Create a new vote on a topic."""
        self.user = user
        self.topic = topic

        # Record the vote in application metrics.
        incr_counter('votes', target_type='topic')
class GroupSubscription(DatabaseModel):
    """Model for a user's subscription to a group.

    Trigger behavior:
      Outgoing:
        - Inserting or deleting a row will increment or decrement the
          num_subscriptions column for the relevant group.
    """

    __tablename__ = 'group_subscriptions'

    # Composite primary key: one subscription per (user, group) pair.
    user_id: int = Column(
        Integer,
        ForeignKey('users.user_id'),
        nullable=False,
        primary_key=True,
    )
    group_id: int = Column(
        Integer,
        ForeignKey('groups.group_id'),
        nullable=False,
        primary_key=True,
    )
    # Set by the database at insert time.
    created_time: datetime = Column(
        TIMESTAMP(timezone=True),
        nullable=False,
        index=True,
        server_default=text('NOW()'),
    )

    user: User = relationship('User', innerjoin=True, backref='subscriptions')
    # lazy=False: the group is eagerly loaded with the subscription.
    group: Group = relationship('Group', innerjoin=True, lazy=False)

    def __init__(self, user: User, group: Group) -> None:
        """Create a new subscription to a group."""
        self.user = user
        self.group = group

        # Record the subscription in application metrics.
        incr_counter('subscriptions')
def _get_type_from_df_col(col: pd.Series, index: bool): """ Take a pd.Series, return its SQLAlchemy datatype If it doesn't match anything, return String Args: col: pd.Series to check index: if True, index cannot be boolean Returns: sqlalchemy Type or None one of {Integer, Float, Boolean, DateTime, String, or None (for all NaN)} """ if col.isna().all(): return None if is_bool_dtype(col): if index: raise ValueError('boolean index does not make sense') return Boolean elif not index and series_is_boolean(col): return Boolean elif is_integer_dtype(col): # parse purported 'integer' columns in a new table. # if values are all zero, make it a float for added safety - common case of a float that is often zero # if database table is type INTEGER, this will be parsed back to int later anyway if index: return Integer for val in col.unique(): if pd.isna(val): continue if val != 0: return Integer return Float elif is_float_dtype(col): return Float elif is_datetime64_any_dtype(col): return TIMESTAMP(timezone=True) else: return String
def test_time_types(self, metadata, connection):
    """Verify MySQL time types (with and without fsp) reflect to themselves."""
    specs = []
    if testing.requires.mysql_fsp.enabled:
        fsps = [None, 0, 5]  # fractional-seconds precisions to exercise
    else:
        fsps = [None]

    for type_ in (mysql.TIMESTAMP, mysql.DATETIME, mysql.TIME):

        # MySQL defaults fsp to 0, and if 0 does not report it.
        # we don't actually render 0 right now in DDL but even if we do,
        # it comes back blank
        for fsp in fsps:
            if fsp:
                specs.append((type_(fsp=fsp), type_(fsp=fsp)))
            else:
                specs.append((type_(), type_()))

    # Generic types should reflect back as their MySQL-specific counterparts.
    specs.extend([(TIMESTAMP(), mysql.TIMESTAMP()), (DateTime(), mysql.DATETIME())])

    # note 'timezone' should always be None on both
    self._run_test(metadata, connection, specs, ["fsp", "timezone"])
class Pricing(DB.BASE):
    """One OHLCV pricing bar for a single asset at a single timestamp."""

    __tablename__ = "pricings"

    id = Column(Integer, primary_key=True)
    timestamp = Column(TIMESTAMP(timezone=True), nullable=False)
    asset = Column(String, nullable=False)
    open = Column(FLOAT, nullable=False)
    high = Column(FLOAT, nullable=False)
    low = Column(FLOAT, nullable=False)
    close = Column(FLOAT, nullable=False)
    volume = Column(FLOAT, nullable=False)

    # One bar per asset per timestamp.
    __table_args__ = (UniqueConstraint("timestamp", "asset"), )

    def __init__(self, timestamp, asset, open, high, low, close, volume):
        """Store every bar field verbatim on the instance."""
        for field, value in (
            ("timestamp", timestamp),
            ("asset", asset),
            ("open", open),
            ("high", high),
            ("low", low),
            ("close", close),
            ("volume", volume),
        ):
            setattr(self, field, value)