class UtcTimestamp(TypeDecorator):
    """Custom SQLAlchemy column type for storing timestamps in UTC in SQLite
    databases.

    This column type always returns timestamps with the UTC timezone. It also
    guards against accidentally trying to store Python naive timestamps (those
    without a time zone).

    In the SQLite database the timestamps are stored as strings of format:
    ``yyyy-mm-dd hh:mm:ss``. **UTC is always implied.**
    """

    impl = sqlite.DATETIME(truncate_microseconds=True)

    # The type is stateless and deterministic, so it is safe to cache in
    # compiled statements. On SQLAlchemy 1.4+ this also silences the
    # "will not produce a cache key" warning; older versions ignore it.
    cache_ok = True

    def process_bind_param(
            self, value: Optional[datetime],
            dialect: Dialect) -> Optional[datetime]:  # noqa: D102
        # Outbound path: reject naive datetimes outright, normalize aware
        # ones to UTC before they are serialized by the SQLite DATETIME impl.
        if value is not None:
            if value.tzinfo is None:
                raise UtcTimestampException(
                    f"Expected timestamp with tzinfo. Got naive timestamp {value!r} instead"
                )
            return value.astimezone(timezone.utc)
        return value

    def process_result_value(
            self, value: Optional[datetime],
            dialect: Dialect) -> Optional[datetime]:  # noqa: D102
        # Inbound path: stored values imply UTC. Attach the UTC tzinfo to
        # naive results; convert any (unexpected) aware result to UTC.
        if value is not None:
            if value.tzinfo is not None:
                return value.astimezone(timezone.utc)
            return value.replace(tzinfo=timezone.utc)
        return value
class OVNRevisionNumbers(model_base.BASEV2):
    # Per-resource revision bookkeeping, keyed by (resource_uuid,
    # resource_type) — presumably tracking what has been synced to OVN;
    # confirm against the journal/maintenance code that writes these rows.
    __tablename__ = 'ovn_revision_numbers'

    # Link to the resource's standardattributes row. ondelete='SET NULL'
    # (with nullable=True) lets this bookkeeping row outlive the resource.
    standard_attr_id = sa.Column(sa.BigInteger().with_variant(
        sa.Integer(), 'sqlite'),
        sa.ForeignKey('standardattributes.id', ondelete='SET NULL'),
        nullable=True)
    resource_uuid = sa.Column(sa.String(36), nullable=False, index=True)
    resource_type = sa.Column(sa.String(36), nullable=False, index=True)
    # BigInteger on real backends, plain Integer on SQLite (which has
    # limited BIGINT support); defaults to 0 both client- and server-side.
    revision_number = sa.Column(sa.BigInteger().with_variant(
        sa.Integer(), 'sqlite'), server_default='0', default=0,
        nullable=False)
    # Truncate microseconds on SQLite so timestamps match the second-level
    # resolution of other backends.
    created_at = sa.Column(sa.DateTime().with_variant(
        sqlite.DATETIME(truncate_microseconds=True), 'sqlite'),
        default=sa.func.now(), nullable=False)
    updated_at = sa.Column(sa.TIMESTAMP, default=sa.func.now(),
                           onupdate=sa.func.now(), nullable=True)
    # Composite primary key: one row per (resource, type) pair.
    __table_args__ = (sa.PrimaryKeyConstraint(
        resource_uuid, resource_type,
        name='ovn_revision_numbers0resource_uuid0resource_type'),
        model_base.BASEV2.__table_args__)
class OpenDaylightJournal(model_base.BASEV2):
    # Journal of operations to replay against OpenDaylight; each row moves
    # through the states declared in the Enum below.
    __tablename__ = 'opendaylightjournal'

    # Monotonic sequence number; IdType is a project-defined column type
    # declared elsewhere in this file.
    seqnum = sa.Column(IdType, primary_key=True, autoincrement=True)
    object_type = sa.Column(sa.String(36), nullable=False)
    object_uuid = sa.Column(sa.String(36), nullable=False)
    operation = sa.Column(sa.String(36), nullable=False)
    # NOTE(review): PickleType — acceptable here only if rows are trusted
    # (written by this service); pickle must never hold untrusted input.
    data = sa.Column(sa.PickleType, nullable=True)
    state = sa.Column(sa.Enum(odl_const.PENDING, odl_const.FAILED,
                              odl_const.PROCESSING, odl_const.COMPLETED),
                      nullable=False, default=odl_const.PENDING)
    retry_count = sa.Column(sa.Integer, default=0)
    # Truncate microseconds on SQLite to match the second-level resolution
    # of other backends' timestamp types.
    created_at = sa.Column(
        sa.DateTime().with_variant(
            sqlite.DATETIME(truncate_microseconds=True), 'sqlite'),
        server_default=sa.func.now())
    last_retried = sa.Column(sa.TIMESTAMP, server_default=sa.func.now(),
                             onupdate=sa.func.now())
    # Optimistic-locking counter; wired into the mapper below so concurrent
    # UPDATEs of the same row raise StaleDataError instead of silently racing.
    version_id = sa.Column(sa.Integer, server_default='0', nullable=False)
    # Self-referential many-to-many through journal_dependencies:
    # `dependencies` = rows this entry depends on; the `depending_on`
    # backref exposes the reverse direction.
    dependencies = sa.orm.relationship(
        "OpenDaylightJournal", secondary=journal_dependencies,
        primaryjoin=seqnum == journal_dependencies.c.depends_on,
        secondaryjoin=seqnum == journal_dependencies.c.dependent,
        backref="depending_on"
    )

    __mapper_args__ = {
        'version_id_col': version_id
    }
def upgrade():
    """Create the initial blockchain schema: blocks, transactions, tx_ins,
    tx_outs, plus their lookup indexes."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Block headers; hash, index and previous_hash each get a unique index.
    op.create_table(
        'blocks',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('index', sa.INTEGER(), nullable=False),
        sa.Column('hash', sa.String(length=64), nullable=False),
        sa.Column('previous_hash', sa.String(length=64), nullable=False),
        sa.Column('difficulty', sa.INTEGER(), nullable=False),
        sa.Column('timestamp', sqlite.DATETIME(), nullable=False),
        sa.Column('nonce', sa.String(length=64), nullable=False),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_blocks_hash'), 'blocks', ['hash'], unique=True)
    op.create_index(op.f('ix_blocks_index'), 'blocks', ['index'], unique=True)
    op.create_index(op.f('ix_blocks_previous_hash'), 'blocks',
                    ['previous_hash'], unique=True)
    # Transactions, ordered by position within their block.
    op.create_table('transactions',
                    sa.Column('id', sa.INTEGER(), nullable=False),
                    sa.Column('txid', sa.String(length=64), nullable=False),
                    sa.Column('block_id', sa.INTEGER(), nullable=False),
                    sa.Column('position', sa.INTEGER(), nullable=False),
                    sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_transactions_block_id'), 'transactions',
                    ['block_id'], unique=False)
    op.create_index(op.f('ix_transactions_position'), 'transactions',
                    ['position'], unique=False)
    op.create_index(op.f('ix_transactions_txid'), 'transactions',
                    ['txid'], unique=False)
    # Transaction inputs: spend a prior output via (tx_out_id, tx_out_index).
    op.create_table(
        'tx_ins',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('transaction_id', sa.INTEGER(), nullable=False),
        sa.Column('signature', sa.String(length=256), nullable=False),
        sa.Column('tx_out_id', sa.String(length=64), nullable=False),
        sa.Column('tx_out_index', sa.INTEGER(), nullable=False),
        sa.Column('position', sa.INTEGER(), nullable=False),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_tx_ins_position'), 'tx_ins',
                    ['position'], unique=False)
    op.create_index(op.f('ix_tx_ins_transaction_id'), 'tx_ins',
                    ['transaction_id'], unique=False)
    # Transaction outputs; amount is NUMERIC(8, 6) returned as float
    # (asdecimal=False).
    op.create_table(
        'tx_outs',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('transaction_id', sa.INTEGER(), nullable=False),
        sa.Column('position', sa.INTEGER(), nullable=False),
        sa.Column('address', sa.String(length=64), nullable=False),
        sa.Column('amount', sa.NUMERIC(precision=8, scale=6,
                                       asdecimal=False), nullable=False),
        sa.PrimaryKeyConstraint('id'))
    op.create_index(op.f('ix_tx_outs_position'), 'tx_outs',
                    ['position'], unique=False)
    op.create_index(op.f('ix_tx_outs_transaction_id'), 'tx_outs',
                    ['transaction_id'], unique=False)
def load_dialect_impl(self, dialect):
    """Return the backend-specific DATETIME implementation for *dialect*.

    Generic backends use the plain ``types.DateTime``; SQLite gets a
    compact ``yyyymmddHHMMSS`` storage format whose read regexp also
    tolerates the dashed/spaced ``yyyy-mm-dd HH:MM:SS`` form.
    """
    if dialect.name != "sqlite":
        return types.DateTime()
    return sqlite.DATETIME(
        storage_format="%(year)04d%(month)02d%(day)02d%(hour)02d%(minute)02d%(second)02d",
        regexp=r"(\d{4})-?(\d{2})-?(\d{2}) ?(\d{2}):?(\d{2}):?(\d{2})",
    )
def get_time_from_datetime(datetime_str):
    """Extract the time-of-day portion of a SQLite datetime string.

    Parses *datetime_str* with the SQLite DATETIME result processor and
    re-serializes its ``.time()`` with the SQLite TIME bind processor.
    ``None`` input yields ``None``.
    """
    from sqlalchemy.dialects import sqlite
    if datetime_str is None:
        return None
    try:
        decode = sqlite.DATETIME().result_processor(sqlite, sqlite.DATETIME())
        time_part = decode(datetime_str).time()
        encoded = sqlite.TIME().bind_processor(sqlite)(time_part)
    except:  # noqa: E722 -- deliberately bare: the pdb hook must see everything
        if sqlite_trace_on_exception:
            import pdb
            pdb.set_trace()
        raise
    return encoded
def addtime(datetime_str, time_str):
    """Add a time-of-day offset to a SQLite datetime string.

    Both arguments are decoded with the SQLite dialect processors; the
    TIME value is converted to a ``timedelta`` and added to the datetime,
    and the sum is re-serialized as a SQLite DATETIME string. Returns
    ``None`` if either input is ``None``.
    """
    from sqlalchemy.dialects import sqlite
    if datetime_str is None or time_str is None:
        return None
    try:
        base_dt = sqlite.DATETIME().result_processor(
            sqlite, sqlite.DATETIME())(datetime_str)
        offset_time = sqlite.TIME().result_processor(
            sqlite, sqlite.TIME())(time_str)
        # Turn the time-of-day into a timedelta by differencing against
        # midnight, then shift the datetime.
        offset = (datetime.combine(datetime.min, offset_time)
                  - datetime.combine(datetime.min, time(0)))
        encoded = sqlite.DATETIME().bind_processor(sqlite)(base_dt + offset)
    except:  # noqa: E722 -- deliberately bare: the pdb hook must see everything
        if sqlite_trace_on_exception:
            import pdb
            pdb.set_trace()
        raise
    return encoded
def timediff(datetime1_str, datetime2_str):
    """Return ``datetime1 - datetime2`` serialized as a SQLite TIME string.

    Both inputs are decoded with the SQLite DATETIME result processor.
    The difference is mapped onto a wall-clock time: non-negative deltas
    are offset from ``datetime.min``, negative ones from ``datetime.max``
    (i.e. they wrap backwards from midnight). Returns ``None`` if either
    input is ``None``.
    """
    from sqlalchemy.dialects import sqlite
    if datetime1_str is None or datetime2_str is None:
        return None
    try:
        def decode(raw):
            return sqlite.DATETIME().result_processor(
                sqlite, sqlite.DATETIME())(raw)

        delta = decode(datetime1_str) - decode(datetime2_str)
        anchor = datetime.min if delta >= timedelta(0) else datetime.max
        diff_as_time = (anchor + delta).time()
    except:  # noqa: E722 -- deliberately bare: the pdb hook must see everything
        if sqlite_trace_on_exception:
            import pdb
            pdb.set_trace()
        raise
    return sqlite.TIME().bind_processor(sqlite)(diff_as_time)
class OpenDaylightJournal(model_base.BASEV2):
    # Journal of operations to replay against OpenDaylight; each row moves
    # through the states declared in the Enum below. (Simpler variant:
    # no dependency tracking or optimistic locking.)
    __tablename__ = 'opendaylightjournal'

    # BigInteger on real backends, plain Integer on SQLite (which has
    # limited BIGINT autoincrement support).
    seqnum = sa.Column(sa.BigInteger().with_variant(sa.Integer(), 'sqlite'),
                       primary_key=True, autoincrement=True)
    object_type = sa.Column(sa.String(36), nullable=False)
    object_uuid = sa.Column(sa.String(36), nullable=False)
    operation = sa.Column(sa.String(36), nullable=False)
    # NOTE(review): PickleType — acceptable here only if rows are trusted
    # (written by this service); pickle must never hold untrusted input.
    data = sa.Column(sa.PickleType, nullable=True)
    state = sa.Column(sa.Enum(odl_const.PENDING, odl_const.FAILED,
                              odl_const.PROCESSING, odl_const.COMPLETED),
                      nullable=False, default=odl_const.PENDING)
    retry_count = sa.Column(sa.Integer, default=0)
    # Truncate microseconds on SQLite to match the second-level resolution
    # of other backends' timestamp types.
    created_at = sa.Column(
        sa.DateTime().with_variant(
            sqlite.DATETIME(truncate_microseconds=True), 'sqlite'),
        server_default=sa.func.now())
    last_retried = sa.Column(sa.TIMESTAMP, server_default=sa.func.now(),
                             onupdate=sa.func.now())
from dateutil.parser import parse import pandas as pd import numpy as np from great_expectations.dataset import PandasDataset, SqlAlchemyDataset, SparkDFDataset from great_expectations.profile import ColumnsExistProfiler try: import sqlalchemy.dialects.sqlite as sqlitetypes SQLITE_TYPES = { "VARCHAR": sqlitetypes.VARCHAR, "CHAR": sqlitetypes.CHAR, "INTEGER": sqlitetypes.INTEGER, "SMALLINT": sqlitetypes.SMALLINT, "DATETIME": sqlitetypes.DATETIME(truncate_microseconds=True), "DATE": sqlitetypes.DATE, "FLOAT": sqlitetypes.FLOAT, "BOOLEAN": sqlitetypes.BOOLEAN } except ImportError: SQLITE_TYPES = {} try: import sqlalchemy.dialects.postgresql as postgresqltypes POSTGRESQL_TYPES = { "TEXT": postgresqltypes.TEXT, "CHAR": postgresqltypes.CHAR, "INTEGER": postgresqltypes.INTEGER, "SMALLINT": postgresqltypes.SMALLINT, "BIGINT": postgresqltypes.BIGINT,