def test_nullable_kwarg(self):
    """Identity columns honor an explicit ``nullable`` kwarg; the default is NOT NULL."""
    table = Table(
        "t",
        MetaData(),
        Column("a", Integer(), Identity(), nullable=False),
        Column("b", Integer(), Identity(), nullable=True),
        Column("c", Integer(), Identity()),
    )
    is_(table.c.a.nullable, False)
    is_(table.c.b.nullable, True)
    # Without an explicit kwarg an Identity column defaults to NOT NULL.
    is_(table.c.c.nullable, False)

    emits_null = (
        getattr(self, "__dialect__", None) != "default_enhanced"
        and testing.against("postgresql")
    )
    null_marker = " NULL" if emits_null else ""
    self.assert_compile(
        CreateTable(table),
        (
            "CREATE TABLE t ("
            "a INTEGER GENERATED BY DEFAULT AS IDENTITY, "
            "b INTEGER GENERATED BY DEFAULT AS IDENTITY%s, "
            "c INTEGER GENERATED BY DEFAULT AS IDENTITY"
            ")"
        )
        % null_marker,
    )
def test_autoincrement_column(self):
    """A lone Identity primary-key column becomes the table's autoincrement column."""
    pk_table = Table(
        "t",
        MetaData(),
        Column("y", Integer, Identity(), primary_key=True),
    )
    assert pk_table._autoincrement_column is pk_table.c.y

    # Without primary_key=True an Identity column is not treated as autoincrement.
    plain_table = Table("t2", MetaData(), Column("y", Integer, Identity()))
    assert plain_table._autoincrement_column is None
class StateAttributes(Base):  # type: ignore[misc,valid-type]
    """State attribute change history."""

    __table_args__ = (
        {
            "mysql_default_charset": "utf8mb4",
            "mysql_collate": "utf8mb4_unicode_ci",
        },
    )
    __tablename__ = TABLE_STATE_ATTRIBUTES
    # Surrogate primary key backed by a database IDENTITY column.
    attributes_id = Column(Integer, Identity(), primary_key=True)
    # FNV-1a 32-bit hash of shared_attrs (see hash_shared_attrs), indexed for lookups.
    hash = Column(BigInteger, index=True)
    # Note that this is not named attributes to avoid confusion with the states table
    shared_attrs = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StateAttributes("
            f"id={self.attributes_id}, hash='{self.hash}', attributes='{self.shared_attrs}'"
            f")>")

    @staticmethod
    def from_event(event: Event) -> StateAttributes:
        """Create object from a state_changed event."""
        state: State | None = event.data.get("new_state")
        # None state means the state was removed from the state machine
        dbstate = StateAttributes(
            shared_attrs="{}" if state is None else JSON_DUMP(state.attributes))
        dbstate.hash = StateAttributes.hash_shared_attrs(dbstate.shared_attrs)
        return dbstate

    @staticmethod
    def shared_attrs_from_event(
            event: Event, exclude_attrs_by_domain: dict[str, set[str]]) -> str:
        """Create shared_attrs from a state_changed event."""
        state: State | None = event.data.get("new_state")
        # None state means the state was removed from the state machine
        if state is None:
            return "{}"
        domain = split_entity_id(state.entity_id)[0]
        # Exclude both the domain-specific attributes and the always-excluded set.
        exclude_attrs = (exclude_attrs_by_domain.get(domain, set())
                         | ALL_DOMAIN_EXCLUDE_ATTRS)
        return JSON_DUMP({
            k: v
            for k, v in state.attributes.items() if k not in exclude_attrs
        })

    @staticmethod
    def hash_shared_attrs(shared_attrs: str) -> int:
        """Return the hash of json encoded shared attributes."""
        return cast(int, fnv1a_32(shared_attrs.encode("utf-8")))

    def to_native(self) -> dict[str, Any]:
        """Convert to an HA state object."""
        try:
            return cast(dict[str, Any], json.loads(self.shared_attrs))
        except ValueError:
            # When json.loads fails
            _LOGGER.exception("Error converting row to state attributes: %s", self)
            return {}
def get_sequence_or_identity(
        sequence_name: str) -> Union[Sequence, 'Identity']:
    """
    Depending on the engine it either returns Sequence, or Identity (in case of MSSQL in SQLAlchemy 1.4).

    In SQLAlchemy 1.4 using sequence is not allowed for primary key columns in MsSQL.
    Primary columns in MsSQL use IDENTITY keyword to auto increment. Using Sequence
    for those fields used to be allowed in SQLAlchemy 1.3 (and essentially ignored if only
    name was specified).

    See https://docs.sqlalchemy.org/en/14/dialects/mssql.html
    Changed in version 1.4: Removed the ability to use a Sequence object to modify IDENTITY
    characteristics. Sequence objects now only manipulate true T-SQL SEQUENCE types.

    :param sequence_name: name of the sequence
    :return: Sequence or Identity
    """
    # Imported lazily to avoid a circular import at module load time.
    from airflow.settings import SQL_ALCHEMY_CONN

    if SQL_ALCHEMY_CONN is not None and SQL_ALCHEMY_CONN.startswith('mssql'):
        try:
            from sqlalchemy import Identity
        except ImportError:
            # Identity object is only available in SQLAlchemy 1.4.
            # For SQLAlchemy 1.3 compatibility we return original Sequence if Identity is missing.
            # Catch only ImportError (not bare Exception) so real bugs are not swallowed.
            pass
        else:
            return Identity()
    return Sequence(sequence_name)
class StatisticsMeta(Base):  # type: ignore
    """Statistics meta data."""

    __table_args__ = (
        {
            "mysql_default_charset": "utf8mb4",
            "mysql_collate": "utf8mb4_unicode_ci",
        },
    )
    __tablename__ = TABLE_STATISTICS_META
    # Surrogate primary key backed by a database IDENTITY column.
    id = Column(Integer, Identity(), primary_key=True)
    # External identifier of the statistic, indexed for lookups.
    statistic_id = Column(String(255), index=True)
    source = Column(String(32))
    unit_of_measurement = Column(String(255))
    # Flags describing which aggregates exist for this statistic.
    has_mean = Column(Boolean)
    has_sum = Column(Boolean)

    @staticmethod
    def from_meta(
        source: str,
        statistic_id: str,
        unit_of_measurement: str | None,
        has_mean: bool,
        has_sum: bool,
    ) -> StatisticsMeta:
        """Create object from meta data."""
        return StatisticsMeta(
            source=source,
            statistic_id=statistic_id,
            unit_of_measurement=unit_of_measurement,
            has_mean=has_mean,
            has_sum=has_sum,
        )
def define_tables(cls, metadata):
    """Define the ``test`` table with an Identity primary key and one data column."""
    columns = [
        Column("id", Integer, Identity(), primary_key=True),
        Column("foo", Integer),
    ]
    Table("test", metadata, *columns)
class Affiliation(Model):
    """Association between a researcher and an institute, with an optional type."""

    __tablename__ = 'researcher_affiliations'
    # Surrogate key generated by the database (GENERATED ALWAYS AS IDENTITY).
    id = Column('affiliation_id', Integer, Identity(always=True), primary_key=True)
    researcher_id = Column(ForeignKey(Researcher.id), nullable=False)
    institute_id = Column(ForeignKey(Institute.id), nullable=False)
    type = Column(Text, nullable=True)
    # At most one row per (researcher, institute, type) combination.
    __table_args__ = (Index('affiliations_idx', researcher_id, institute_id, type,
                            unique=True), )
    researcher = relationship(Researcher, backref="affiliations")
    institute = relationship(Institute, backref="affiliations")

    @classmethod
    def creator(cls: Type['Affiliation'],
                arg_name) -> Callable[[Model], 'Affiliation']:
        """Return a factory that builds an Affiliation from a single related model.

        ``arg_name`` is the constructor keyword under which the model passed to
        the returned callable is forwarded (e.g. one of the relationship names).
        """
        def the_creator(arg: Model) -> 'Affiliation':
            kwargs = {arg_name: arg}
            return cls(**kwargs)

        return the_creator
class StatisticsBase:
    """Statistics base class."""

    # Surrogate primary key backed by a database IDENTITY column.
    id = Column(Integer, Identity(), primary_key=True)
    created = Column(DATETIME_TYPE, default=dt_util.utcnow)

    @declared_attr
    def metadata_id(self):
        """Define the metadata_id column for sub classes."""
        # declared_attr so each concrete subclass table gets its own FK column.
        return Column(
            Integer,
            ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"),
            index=True,
        )

    start = Column(DATETIME_TYPE, index=True)
    # Aggregate values for the statistics period.
    mean = Column(DOUBLE_TYPE)
    min = Column(DOUBLE_TYPE)
    max = Column(DOUBLE_TYPE)
    last_reset = Column(DATETIME_TYPE)
    state = Column(DOUBLE_TYPE)
    sum = Column(DOUBLE_TYPE)

    @classmethod
    def from_stats(cls, metadata_id: int, stats: StatisticData):
        """Create object from a statistics."""
        return cls(  # type: ignore
            metadata_id=metadata_id,
            **stats,
        )
def test_identity_is_ignored_in_pk(self, dialect, autoincrement):
    """Identity on a PK column compiles identically to a plain PK column on this dialect."""
    with_identity = Table(
        "foo_table",
        MetaData(),
        Column(
            "foo",
            Integer(),
            Identity("always", start=3),
            primary_key=True,
            autoincrement=autoincrement,
        ),
    )
    without_identity = Table(
        "foo_table",
        MetaData(),
        Column("foo", Integer(), primary_key=True, autoincrement=autoincrement),
    )
    target_dialect = self.get_dialect(dialect)
    # Normalize whitespace so the two compilations compare cleanly.
    expected = re.sub(
        r"[\n\t]", "", CreateTable(without_identity).compile(dialect=target_dialect).string
    )
    self.assert_compile(CreateTable(with_identity), expected, dialect=target_dialect)
def test_identity_is_ignored(self):
    """The Identity construct is dropped entirely from the compiled DDL."""
    table = Table(
        "foo_table",
        MetaData(),
        Column("foo", Integer(), Identity("always", start=3)),
    )
    expected_ddl = "CREATE TABLE foo_table (foo INTEGER)"
    self.assert_compile(CreateTable(table), expected_ddl)
class EventData(Base):  # type: ignore[misc,valid-type]
    """Event data history."""

    __table_args__ = (
        {"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
    )
    __tablename__ = TABLE_EVENT_DATA
    # Surrogate primary key backed by a database IDENTITY column.
    data_id = Column(Integer, Identity(), primary_key=True)
    # Hash of shared_data, indexed for deduplication lookups.
    hash = Column(BigInteger, index=True)
    # Note that this is not named attributes to avoid confusion with the states table
    shared_data = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))
class StatisticsRuns(Base):  # type: ignore
    """Representation of statistics run."""

    __tablename__ = TABLE_STATISTICS_RUNS
    # Surrogate primary key backed by a database IDENTITY column.
    run_id = Column(Integer, Identity(), primary_key=True)
    # Start of the statistics run.
    start = Column(DateTime(timezone=True))

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StatisticsRuns("
            f"id={self.run_id}, start='{self.start.isoformat(sep=' ', timespec='seconds')}', "
            f")>")
def define_tables(cls, metadata):
    """Create one table per Identity variant: full option set, then each integer/numeric type."""
    identity_columns = [
        Column(
            "id1",
            Integer,
            Identity(
                always=True,
                start=2,
                increment=3,
                minvalue=-2,
                maxvalue=42,
                cycle=True,
                cache=4,
            ),
        ),
        Column("id2", Integer, Identity()),
        Column("id3", sqltypes.BigInteger, Identity()),
        Column("id4", sqltypes.SmallInteger, Identity()),
        Column("id5", sqltypes.Numeric, Identity()),
    ]
    # Tables are named t0..t4, matching the column list order.
    for index, column in enumerate(identity_columns):
        Table("t%s" % index, metadata, column)
class CollectionAudit(Base):
    """Collection audit log."""

    __tablename__ = 'collection_audit'
    # Surrogate primary key backed by a database IDENTITY column.
    id = Column(Integer, Identity(), primary_key=True)
    # Client and (optional) user that issued the audited command.
    client_id = Column(String, nullable=False)
    user_id = Column(String)
    command = Column(Enum(AuditCommand), nullable=False)
    timestamp = Column(TIMESTAMP(timezone=True), nullable=False)
    # Underscore-prefixed columns record the affected collection's fields —
    # NOTE(review): exact snapshot semantics assumed; confirm against the writer.
    _id = Column(String, nullable=False)
    _name = Column(String)
    _doi_key = Column(String)
    _provider_id = Column(String)
class SchemaChanges(Base):  # type: ignore
    """Representation of schema version changes."""

    __tablename__ = TABLE_SCHEMA_CHANGES
    # Surrogate primary key backed by a database IDENTITY column.
    change_id = Column(Integer, Identity(), primary_key=True)
    schema_version = Column(Integer)
    # When this schema version was recorded; defaults to now (UTC).
    changed = Column(DateTime(timezone=True), default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.SchemaChanges("
            f"id={self.change_id}, schema_version={self.schema_version}, "
            f"changed='{self.changed.isoformat(sep=' ', timespec='seconds')}'"
            f")>")
class RecorderRuns(Base):  # type: ignore
    """Representation of recorder run."""

    __table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),)
    __tablename__ = TABLE_RECORDER_RUNS
    # Surrogate primary key backed by a database IDENTITY column.
    run_id = Column(Integer, Identity(), primary_key=True)
    start = Column(DateTime(timezone=True), default=dt_util.utcnow)
    # When end is NULL, entity_ids() applies no upper time bound (run still open).
    end = Column(DateTime(timezone=True))
    closed_incorrect = Column(Boolean, default=False)
    created = Column(DateTime(timezone=True), default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        end = (
            f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None
        )
        return (
            f"<recorder.RecorderRuns("
            f"id={self.run_id}, start='{self.start.isoformat(sep=' ', timespec='seconds')}', "
            f"end={end}, closed_incorrect={self.closed_incorrect}, "
            f"created='{self.created.isoformat(sep=' ', timespec='seconds')}'"
            f")>"
        )

    def entity_ids(self, point_in_time=None):
        """Return the entity ids that existed in this run.

        Specify point_in_time if you want to know which existed at that point
        in time inside the run.
        """
        # Requires an attached ORM session; object_session is None for detached rows.
        session = Session.object_session(self)
        assert session is not None, "RecorderRuns need to be persisted"
        query = session.query(distinct(States.entity_id)).filter(
            States.last_updated >= self.start
        )
        # Upper bound: the explicit point_in_time wins over the run's end.
        if point_in_time is not None:
            query = query.filter(States.last_updated < point_in_time)
        elif self.end is not None:
            query = query.filter(States.last_updated < self.end)
        return [row[0] for row in query]

    def to_native(self, validate_entity_id=True):
        """Return self, native format is this model."""
        return self
class RecordTagAudit(Base):
    """Record tag audit log."""

    __tablename__ = 'record_tag_audit'
    # Surrogate primary key backed by a database IDENTITY column.
    id = Column(Integer, Identity(), primary_key=True)
    # Client and (optional) user that issued the audited command.
    client_id = Column(String, nullable=False)
    user_id = Column(String)
    command = Column(Enum(AuditCommand), nullable=False)
    timestamp = Column(TIMESTAMP(timezone=True), nullable=False)
    # Underscore-prefixed columns record the affected record-tag's fields —
    # NOTE(review): exact snapshot semantics assumed; confirm against the writer.
    _id = Column(String, nullable=False)
    _record_id = Column(String, nullable=False)
    _tag_id = Column(String, nullable=False)
    _user_id = Column(String)
    _data = Column(JSONB)
def test_identity_is_ignored(self):
    """A table with Identity compiles the same as the identical table without it."""
    with_identity = Table(
        "foo_table",
        MetaData(),
        Column("foo", Integer(), Identity("always", start=3)),
    )
    plain = Table(
        "foo_table",
        MetaData(),
        Column("foo", Integer()),
    )
    # Compile the plain table on the live dialect and strip whitespace noise.
    compiled = CreateTable(plain).compile(dialect=testing.db.dialect)
    self.assert_compile(
        CreateTable(with_identity),
        re.sub(r"[\n\t]", "", str(compiled)),
    )
def test_on_null(self):
    """The ON NULL clause renders only on Oracle; other dialects omit it."""
    table = Table(
        "foo_table",
        MetaData(),
        Column(
            "foo",
            Integer(),
            Identity(always=False, on_null=True, start=42, order=True),
        ),
    )
    on_null_clause = " ON NULL" if testing.against("oracle") else ""
    expected = (
        "CREATE TABLE foo_table (foo INTEGER GENERATED BY DEFAULT"
        + on_null_clause
        + " AS IDENTITY (START WITH 42 ORDER))"
    )
    self.assert_compile(CreateTable(table), expected)
def test_create_ddl(self, identity_args, text):
    """Identity options render in CREATE TABLE, both before and after to_metadata()."""
    expected = "CREATE TABLE foo_table (foo INTEGER GENERATED %s)" % text
    original = Table(
        "foo_table",
        MetaData(),
        Column("foo", Integer(), Identity(**identity_args)),
    )
    self.assert_compile(CreateTable(original), expected)
    # A copy made via to_metadata() must carry the Identity along unchanged.
    copied = original.to_metadata(MetaData())
    self.assert_compile(CreateTable(copied), expected)
class EventData(Base):  # type: ignore[misc,valid-type]
    """Event data history."""

    __table_args__ = (
        {
            "mysql_default_charset": "utf8mb4",
            "mysql_collate": "utf8mb4_unicode_ci",
        },
    )
    __tablename__ = TABLE_EVENT_DATA
    # Surrogate primary key backed by a database IDENTITY column.
    data_id = Column(Integer, Identity(), primary_key=True)
    # FNV-1a 32-bit hash of the JSON payload (see hash_shared_data_bytes), indexed for dedup.
    hash = Column(BigInteger, index=True)
    # Note that this is not named attributes to avoid confusion with the states table
    shared_data = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.EventData("
            f"id={self.data_id}, hash='{self.hash}', data='{self.shared_data}'"
            f")>")

    @staticmethod
    def from_event(event: Event) -> EventData:
        """Create object from an event."""
        # Serialize once; reuse the bytes for both the stored text and the hash.
        shared_data = json_bytes(event.data)
        return EventData(
            shared_data=shared_data.decode("utf-8"),
            hash=EventData.hash_shared_data_bytes(shared_data),
        )

    @staticmethod
    def shared_data_bytes_from_event(event: Event) -> bytes:
        """Create shared_data from an event."""
        return json_bytes(event.data)

    @staticmethod
    def hash_shared_data_bytes(shared_data_bytes: bytes) -> int:
        """Return the hash of json encoded shared data."""
        return cast(int, fnv1a_32(shared_data_bytes))

    def to_native(self) -> dict[str, Any]:
        """Convert to an HA state object."""
        try:
            return cast(dict[str, Any], json_loads(self.shared_data))
        except JSON_DECODE_EXCEPTIONS:
            _LOGGER.exception("Error converting row to event data: %s", self)
            return {}
class RecordAudit(Base):
    """Record audit log."""

    __tablename__ = 'record_audit'
    # Surrogate primary key backed by a database IDENTITY column.
    id = Column(Integer, Identity(), primary_key=True)
    # Client and (optional) user that issued the audited command.
    client_id = Column(String, nullable=False)
    user_id = Column(String)
    command = Column(Enum(AuditCommand), nullable=False)
    timestamp = Column(TIMESTAMP(timezone=True), nullable=False)
    # Underscore-prefixed columns record the affected record's fields —
    # NOTE(review): exact snapshot semantics assumed; confirm against the writer.
    _id = Column(String, nullable=False)
    _doi = Column(String)
    _sid = Column(String)
    _metadata = Column(JSONB)
    _collection_id = Column(String)
    _schema_id = Column(String)
def test_identity_is_ignored(self, dialect):
    """On this dialect, an Identity column compiles like a plain NOT NULL column."""
    with_identity = Table(
        "foo_table",
        MetaData(),
        Column("foo", Integer(), Identity("always", start=3)),
    )
    plain = Table(
        "foo_table",
        MetaData(),
        Column("foo", Integer(), nullable=False),
    )
    target_dialect = self.get_dialect(dialect)
    # Normalize whitespace so the two compilations compare cleanly.
    expected = re.sub(
        r"[\n\t]", "", CreateTable(plain).compile(dialect=target_dialect).string
    )
    self.assert_compile(CreateTable(with_identity), expected, dialect=target_dialect)
class AlertModel(Base):
    """Alert ORM that defines a table."""

    __tablename__ = 'alert'
    # IDENTITY key starting at 1; cycles when the sequence range is exhausted.
    id = Column('id', Integer, Identity(start=1, cycle=True), primary_key=True)
    email = Column('email', String, nullable=False)
    prism_url = Column('prism_url', String, nullable=False)
    alert_name = Column('alert_name', String)
    alert_config = Column('alert_config', JSON, nullable=False)
    # Optional numeric bounds — NOTE(review): trigger semantics assumed; confirm
    # against the alert-evaluation code.
    min = Column('min', Integer)
    max = Column('max', Integer)
    zones = Column('zones', JSON, nullable=False)
    # Timestamps maintained automatically on insert/update.
    created_at = Column('created_at', DateTime, default=datetime.datetime.now)
    updated_at = Column('updated_at',
                        DateTime,
                        default=datetime.datetime.now,
                        onupdate=datetime.datetime.now)
    last_triggered = Column('last_triggered', TIMESTAMP, nullable=True)
def test_other_options(self):
    """Identity combines with nullable=False and unique=True in the rendered DDL."""
    table = Table(
        "foo_table",
        MetaData(),
        Column(
            "foo",
            Integer(),
            Identity(always=True, start=3),
            nullable=False,
            unique=True,
        ),
    )
    expected = (
        "CREATE TABLE foo_table ("
        "foo INTEGER GENERATED ALWAYS AS IDENTITY (START "
        "WITH 3), UNIQUE (foo))"
    )
    self.assert_compile(CreateTable(table), expected)
def test_autoincrement_true(self):
    """Identity plus primary_key/autoincrement renders both IDENTITY and PRIMARY KEY."""
    table = Table(
        "foo_table",
        MetaData(),
        Column(
            "foo",
            Integer(),
            Identity(always=True, start=3),
            primary_key=True,
            autoincrement=True,
        ),
    )
    expected = (
        "CREATE TABLE foo_table ("
        "foo INTEGER GENERATED ALWAYS AS IDENTITY (START WITH 3)"
        ", PRIMARY KEY (foo))"
    )
    self.assert_compile(CreateTable(table), expected)
class Repository(db.Model):
    """A repository record mirrored from an external hosting service."""

    __tablename__ = "repositories"
    __table_args__ = (
        #Index("hosting_service_id_index", "hosting_service_id"),
        # UNLOGGED prevents wal write (as we will never edit the table, it should be fine?)
        {
            "prefixes": ["UNLOGGED"]
        },
    )
    # use identity to avoid reusing same sequence when we copy this table:
    # https://stackoverflow.com/a/12265248
    id = db.Column(db.Integer, Identity(), primary_key=True)
    # id on the hosting_service
    foreign_id = db.Column(db.Integer)
    hosting_service_id = db.Column(
        db.Integer,
        db.ForeignKey("hosting_service.id"),
        nullable=True,
    )
    hosting_service = db.relationship("HostingService",
                                      backref=db.backref("repos", lazy=True))
    name = db.Column(db.String(500), nullable=False)
    username = db.Column(db.String(500), nullable=False)
    description = db.Column(db.Text(), nullable=True)
    # Timestamps as reported by the hosting service; all optional.
    created_at = db.Column(db.DateTime(), nullable=True)
    updated_at = db.Column(db.DateTime(), nullable=True)
    pushed_at = db.Column(db.DateTime(), nullable=True)
    stars_count = db.Column(db.Integer(), nullable=True)
    forks_count = db.Column(db.Integer(), nullable=True)
    # Repository status flags.
    is_private = db.Column(db.Boolean())
    is_fork = db.Column(db.Boolean())
    is_archived = db.Column(db.Boolean())
    is_mirror = db.Column(db.Boolean())
    is_empty = db.Column(db.Boolean())
    homepage_url = db.Column(db.String(500), nullable=True)
    repo_url = db.Column(db.String(500), nullable=True)
class StatisticsMeta(Base):  # type: ignore[misc,valid-type]
    """Statistics meta data."""

    __table_args__ = (
        {
            "mysql_default_charset": "utf8mb4",
            "mysql_collate": "utf8mb4_unicode_ci",
        },
    )
    __tablename__ = TABLE_STATISTICS_META
    # Surrogate primary key backed by a database IDENTITY column.
    id = Column(Integer, Identity(), primary_key=True)
    # Unique, indexed external identifier of the statistic.
    statistic_id = Column(String(255), index=True, unique=True)
    source = Column(String(32))
    unit_of_measurement = Column(String(255))
    # Flags describing which aggregates exist for this statistic.
    has_mean = Column(Boolean)
    has_sum = Column(Boolean)
    name = Column(String(255))

    @staticmethod
    def from_meta(meta: StatisticMetaData) -> StatisticsMeta:
        """Create object from meta data."""
        return StatisticsMeta(**meta)
def test_to_metadata(self):
    """to_metadata() copies the Identity and rebinds the copy to the new column."""
    source_identity = Identity("by default", on_null=True, start=123)
    source_table = Table(
        "t",
        MetaData(),
        Column("x", Integer),
        Column("y", Integer, source_identity),
    )
    is_(source_identity.column, source_table.c.y)
    # is_(source_table.c.y.server_onupdate, source_identity)
    is_(source_table.c.y.server_default, source_identity)

    copied_meta = MetaData()
    copied_table = source_table.to_metadata(copied_meta)
    copied_identity = copied_table.c.y.server_default

    # The copy is a distinct object; the original stays bound to its own column.
    is_not_(source_identity, copied_identity)
    is_(source_identity.column, source_table.c.y)
    # is_(source_table.c.y.server_onupdate, source_identity)
    is_(source_table.c.y.server_default, source_identity)

    is_(copied_identity.column, copied_table.c.y)
    # is_(copied_table.c.y.server_onupdate, copied_identity)
    is_(copied_table.c.y.server_default, copied_identity)
def test_create_ddl(self, identity_args, text):
    """Identity DDL renders before and after to_metadata(); Oracle spells NOMINVALUE/NOMAXVALUE."""
    if getattr(self, "__dialect__", None) != "default" and testing.against("oracle"):
        # Oracle uses the space-less keyword forms.
        text = text.replace("NO MINVALUE", "NOMINVALUE").replace(
            "NO MAXVALUE", "NOMAXVALUE")

    expected = "CREATE TABLE foo_table (foo INTEGER GENERATED %s)" % text
    original = Table(
        "foo_table",
        MetaData(),
        Column("foo", Integer(), Identity(**identity_args)),
    )
    self.assert_compile(CreateTable(original), expected)
    # A copy made via to_metadata() must carry the Identity along unchanged.
    copied = original.to_metadata(MetaData())
    self.assert_compile(CreateTable(copied), expected)