Example #1
 def _boolean_fixture(self, table_args=(), table_kwargs={}):
     m = MetaData()
     t = Table('tname', m, Column('id', Integer, primary_key=True),
               Column('flag', Boolean))
     return ApplyBatchImpl(t, table_args, table_kwargs)
Example #2
 def sqla_metadata(self):
     # pylint: disable=no-member
     metadata = MetaData(bind=self.get_sqla_engine())
     metadata.reflect()  # reflect() populates the MetaData in place and returns None
     return metadata
 def _get_meta_data(engine):
     """search db objects like views or tables"""
     meta_data = MetaData(engine)
     meta_data.reflect(views=True)
     return meta_data
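
A minimal usage sketch for the reflection helper above; the in-memory engine URL is an assumption for illustration, and the 1.3-era MetaData(engine) API matches the snippet:

from sqlalchemy import create_engine

engine = create_engine('sqlite://')
meta = _get_meta_data(engine)
print(sorted(meta.tables))  # reflected table (and view) names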
Example #4
    "drop_db",
    # util
    "test_db_connection", "get_contest_list", "is_contest_id",
    "ask_for_contest", "get_submissions", "get_submission_results",
    "get_datasets_to_judge", "enumerate_files"
]


# Instantiate or import these objects.

version = 39

engine = create_engine(config.database, echo=config.database_debug,
                       pool_timeout=60, pool_recycle=120)

metadata = MetaData(engine)

from .session import Session, ScopedSession, SessionGen, \
    custom_psycopg2_connection

from .types import CastingArray, Codename, Filename, FilenameSchema, \
    FilenameSchemaArray, Digest
from .base import Base
from .fsobject import FSObject, LargeObject
from .contest import Contest, Announcement, Presentation
from .user import User, Team, Participation, Message, Question
from .admin import Admin
from .task import Task, Statement, Attachment, Dataset, Manager, Testcase, \
    TaskName, TaskTitle
from .submission import Submission, File, Token, SubmissionResult, \
    Executable, Evaluation
Example #5
                        Table, Numeric)
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
import sqlahelper as sah
from geoalchemy2 import types as geotypes

# import WAM_APP_FRED.app_settings
# from geoalchemy2.types import Geometry
# from WAM_APP_FRED.cli.openFRED import mapped_classes, db_session

# ##########################################SQLAlchemy setup########################################
SCHEMA_1 = 'climate'
SCHEMA_2 = 'supply'
engine = sah.get_engine('oep_engine')
metadata_1 = MetaData(schema=SCHEMA_1, bind=engine)
metadata_2 = MetaData(schema=SCHEMA_2, bind=engine)
# ##########################################TABLE DEFINITION########################################

# function included from GitHub: https://github.com/open-fred/cli/blob/master/openFRED.py
# copied here to avoid a dependency


def mapped_classes(metadata):
    """ Returns classes mapped to the openFRED database via SQLAlchemy.
    The classes are dynamically created and stored in a dictionary keyed by
    class names. The dictionary also contains the special entry `__Base__`,
    which is an SQLAlchemy `declarative_base` instance used as the base class from
    which all mapped classes inherit.
    """
Example #6
    def test_nolength_string(self):
        metadata = MetaData()
        foo = Table('foo', metadata, Column('one', String))

        foo.create(config.db)
        foo.drop(config.db)
# define metadata for table

from sqlalchemy import MetaData
from sqlalchemy import Table, Column
from sqlalchemy import Integer, String

metadata = MetaData()  # metadata is a collection of tables and can be traversed like an XML DOM

user_table = Table("user", metadata, Column("id", Integer, primary_key=True),
                   Column("name", String))

# initialize an engine for an in-memory SQLite database
from sqlalchemy import create_engine

engine = create_engine("sqlite://", echo=True)

# create table from metadata
metadata.create_all(engine)

# now load the table's metadata from the database via reflection
metadata2 = MetaData()
user_table_reflected = Table("user",
                             metadata2,
                             autoload=True,
                             autoload_with=engine)

print("-- TABLE SCHEMA LOADED FROM DB ---------------------")
print(repr(user_table_reflected))
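
The "XML DOM" comparison above can be made concrete: tables hang off the MetaData and columns hang off each table, so the whole collection is walkable:

for table in metadata2.sorted_tables:
    print(table.name)
    for column in table.c:
        print("   ", column.name, column.type)

(In SQLAlchemy 1.4+, the autoload=True flag is deprecated and implied whenever autoload_with is given.)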
Example #8
 def _fk_fixture(self, table_args=(), table_kwargs={}):
     m = MetaData()
     t = Table('tname', m, Column('id', Integer, primary_key=True),
               Column('email', String()),
               Column('user_id', Integer, ForeignKey('user.id')))
     return ApplyBatchImpl(t, table_args, table_kwargs)
Example #9
                        pool_timeout=1200,
                        pool_reset_on_return='commit')
    return eng


engine = get_engine()
Base = declarative_base()

Session = sessionmaker(bind=engine)
db_session = Session()

session_factory = sessionmaker(bind=engine)
ScopedSession = scoped_session(session_factory)
db_scoped_session = ScopedSession()

metadata = MetaData(get_engine())
db_lock = threading.RLock()

__all__ = [
    'engine', 'Base', 'db_session', 'metadata', 'db_lock', 'db_scoped_session',
    'ScopedSession'
]

# ss = ScopedSession()
# ss2 = ScopedSession()
# if ss is ss2:
#     print(1)
# ScopedSession.remove()
#
# if ss is ss2:
#     print(1)
Example #10
 def _ix_fixture(self, table_args=(), table_kwargs={}):
     m = MetaData()
     t = Table('tname', m, Column('id', Integer, primary_key=True),
               Column('x', String()), Column('y', Integer),
               Index('ix1', 'y'))
     return ApplyBatchImpl(t, table_args, table_kwargs)
Example #11
 def _sql_ck_fixture(self, table_args=(), table_kwargs={}):
     m = MetaData()
     t = Table('tname', m, Column('id', Integer, primary_key=True),
               Column('email', String()))
     t.append_constraint(CheckConstraint(t.c.email.like('%@%')))
     return ApplyBatchImpl(t, table_args, table_kwargs)
Example #12
 def _uq_fixture(self, table_args=(), table_kwargs={}):
     m = MetaData()
     t = Table('tname', m, Column('id', Integer, primary_key=True),
               Column('x', String()), Column('y', Integer),
               UniqueConstraint('y', name='uq1'))
     return ApplyBatchImpl(t, table_args, table_kwargs)
Example #13
 def _enum_fixture(self, table_args=(), table_kwargs={}):
     m = MetaData()
     t = Table('tname', m, Column('id', Integer, primary_key=True),
               Column('thing', Enum('a', 'b', 'c')))
     return ApplyBatchImpl(t, table_args, table_kwargs)
Example #14
 def _boolean_no_ck_fixture(self, table_args=(), table_kwargs={}):
     m = MetaData()
     t = Table('tname', m, Column('id', Integer, primary_key=True),
               Column('flag', Boolean(create_constraint=False)))
     return ApplyBatchImpl(t, table_args, table_kwargs)
Example #15
    def test_basic(self, metadata, connection):

        s_table = Table(
            "sometable",
            metadata,
            Column("id_a", Unicode(255), primary_key=True),
            Column("id_b", Unicode(255), primary_key=True, unique=True),
            Column("group", Unicode(255), primary_key=True),
            Column("col", Unicode(255)),
            UniqueConstraint("col", "group"),
        )

        # "group" is a keyword, so lower case
        normalind = Index("tableind", s_table.c.id_b, s_table.c.group)
        Index(
            "compress1", s_table.c.id_a, s_table.c.id_b, oracle_compress=True
        )
        Index(
            "compress2",
            s_table.c.id_a,
            s_table.c.id_b,
            s_table.c.col,
            oracle_compress=1,
        )

        metadata.create_all(connection)

        mirror = MetaData()
        mirror.reflect(connection)

        metadata.drop_all(connection)
        mirror.create_all(connection)

        inspect = MetaData()
        inspect.reflect(connection)

        def obj_definition(obj):
            return (
                obj.__class__,
                tuple([c.name for c in obj.columns]),
                getattr(obj, "unique", None),
            )

        # find what the primary key constraint name should be
        primaryconsname = connection.scalar(
            text(
                """SELECT constraint_name
               FROM all_constraints
               WHERE table_name = :table_name
               AND owner = :owner
               AND constraint_type = 'P' """
            ),
            dict(
                table_name=s_table.name.upper(),
                owner=testing.db.dialect.default_schema_name.upper(),
            ),
        )

        reflectedtable = inspect.tables[s_table.name]

        # make a dictionary of the reflected objects:

        reflected = dict(
            [
                (obj_definition(i), i)
                for i in reflectedtable.indexes | reflectedtable.constraints
            ]
        )

        # assert we got the primary key constraint and its name;
        # raises KeyError if it was not reflected

        assert (
            reflected[
                (PrimaryKeyConstraint, ("id_a", "id_b", "group"), None)
            ].name.upper()
            == primaryconsname.upper()
        )

        # KeyError if not in dict

        eq_(reflected[(Index, ("id_b", "group"), False)].name, normalind.name)
        assert (Index, ("id_b",), True) in reflected
        assert (Index, ("col", "group"), True) in reflected

        idx = reflected[(Index, ("id_a", "id_b"), False)]
        assert idx.dialect_options["oracle"]["compress"] == 2

        idx = reflected[(Index, ("id_a", "id_b", "col"), False)]
        assert idx.dialect_options["oracle"]["compress"] == 1

        eq_(len(reflectedtable.constraints), 1)
        eq_(len(reflectedtable.indexes), 5)
Example #16
def create_tables(engine=user_engine):
    meta = MetaData()
    meta.create_all(bind=engine, tables=tables)
    print('-   Tables created successfully:')
    for table in tables:
        print('-               %s' % table)
Example #17
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    table = define_hosts_table(meta)
    table.create()
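
sqlalchemy-migrate scripts conventionally pair upgrade() with a mirror-image downgrade(); a hedged sketch reusing the same define_hosts_table helper from this snippet:

def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    table = define_hosts_table(meta)
    table.drop()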
Example #18
def drop_tables(engine=user_engine):
    meta = MetaData()
    meta.drop_all(bind=engine, tables=tables)
    print('-   Tables dropped successfully:')
    print(tables)
Example #19
from sqlalchemy import (create_engine, MetaData, Column, Table, Integer,
                        String, DateTime)
from datetime import datetime

engine = create_engine('sqlite:///teste.db', echo=True)
metadata = MetaData(bind=engine)

user_table = Table(
    'usuarios', metadata,
    Column('id', Integer, primary_key=True),
    Column('nome', String(40), index=True),
    Column('idade', Integer, nullable=False),
    Column('senha', String),
    Column('Criado_em', DateTime, default=datetime.now),
    Column('atualizado_em', DateTime, default=datetime.now,
           onupdate=datetime.now))

metadata.create_all(engine)
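
A quick hedged usage check for the table above (the row values are made up); the legacy autocommitting engine.execute style matches the 1.3-era MetaData(bind=engine) used here:

engine.execute(user_table.insert().values(nome='Ana', idade=30, senha='x'))
print(engine.execute(user_table.select()).fetchall())  # defaults/onupdate fire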
def upgrade():
    connection = op.get_bind()
    meta = MetaData()
    meta.reflect(bind=connection)

    commit_table = meta.tables["commit"]
    distribution_table = meta.tables["distribution"]
    machine_table = meta.tables["machine"]
    run_table = meta.tables["run"]
    summary_table = meta.tables["summary"]

    runs = connection.execute(run_table.select())
    commits = connection.execute(commit_table.select())
    distributions = connection.execute(distribution_table.select())
    machines = connection.execute(machine_table.select())
    runs_by_id = {r["id"]: r for r in runs}
    commits_by_id = {c["id"]: c for c in commits}
    machines_by_id = {m["id"]: m for m in machines}

    logging.info("1fed559406c5: Get benchmarks")
    summaries = connection.execute(
        summary_table.select()
        .join(run_table, run_table.c.id == summary_table.c.run_id)
        .filter(run_table.c.name.like("commit: %"))
    )

    i = 1

    logging.info("1fed559406c5: Truncate distribution table")
    connection.execute(distribution_table.delete())
    assert list(connection.execute(distribution_table.select())) == []

    for summary in summaries:
        run = runs_by_id.get(summary["run_id"])
        if not run:
            continue

        commit = commits_by_id.get(run["commit_id"])
        if not commit:
            continue

        if commit.timestamp is None:
            continue

        m = machines_by_id[run["machine_id"]]
        machine_hash = f"{m.name}-{m.gpu_count}-{m.cpu_core_count}-{m.cpu_thread_count}-{m.memory_bytes}"

        distributions = list(
            connection.execute(
                get_distribution(
                    summary_table,
                    run_table,
                    machine_table,
                    commit_table,
                    commit,
                    summary,
                    machine_hash,
                    100,
                )
            )
        )

        if not distributions:
            continue

        distribution = distributions[0]
        values = dict(distribution)
        machine_hash = values.pop("hash")
        values["id"] = uuid.uuid4().hex
        values["machine_hash"] = machine_hash
        values["limit"] = 100

        connection.execute(
            insert(distribution_table)
            .values(values)
            .on_conflict_do_update(
                index_elements=["case_id", "context_id", "commit_id", "machine_hash"],
                set_=values,
            )
        )
        logging.info(f"1fed559406c5: Processed {i} summary")
        i += 1

    logging.info("1fed559406c5: Done with migration")
Example #21
    CheckConstraint,
    Integer,
    String,
    DateTime,
    Boolean,
    Date,
    Text,
    SmallInteger,
    DDL,
    sql,
)
from sqlalchemy.dialects.postgresql import ARRAY, UUID, INET, JSONB

metadata = MetaData(
    naming_convention={
        'fk': '%(table_name)s_fk_%(column_0_name)s',
        'ix': '%(table_name)s_idx_%(column_0_name)s',
        'pk': '%(table_name)s_pkey',
    })
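
A quick hedged check of how these tokens expand, using a throwaway MetaData and hypothetical table names so the real metadata object stays untouched:

from sqlalchemy import Column, ForeignKey, Integer, Table
from sqlalchemy.schema import CreateTable

_m = MetaData(naming_convention={'fk': '%(table_name)s_fk_%(column_0_name)s'})
Table('artist', _m, Column('id', Integer, primary_key=True))
_t = Table('track', _m, Column('id', Integer, primary_key=True),
           Column('artist_id', Integer, ForeignKey('artist.id')))
# the FK renders as: CONSTRAINT track_fk_artist_id FOREIGN KEY(artist_id) ...
print(CreateTable(_t))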

import mbdata.config  # noqa: E402
mbdata.config.configure(metadata=metadata, schema='musicbrainz')

sqlalchemy.event.listen(
    metadata,
    'before_create',
    DDL('CREATE SCHEMA IF NOT EXISTS musicbrainz'),
)

account = Table(
    'account',
    metadata,
Example #22
File: api.py Project: kvh/dcp
 def get_sqlalchemy_metadata(self):
     sa_engine = self.get_engine()
     meta = MetaData()
     meta.reflect(bind=sa_engine)
     return meta
Example #23

app = Flask(__name__)


#################################################
# Database Setup
#################################################
DATABASE_URL = os.environ['DATABASE_URL']
app.config["SQLALCHEMY_DATABASE_URI"] = DATABASE_URL
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# for local "postgres://*****:*****@@127.0.0.1/inc5000"
db = SQLAlchemy(app)


metadata = MetaData(bind=db.engine) 
# inc2018_data = Table('inc2018_data', metadata, autoload_with=db.engine) 

inc2018_data = Table('inc2018_data', metadata, autoload_with=db.engine) 


#session = Session(db.engine)
# reflect an existing database into a new model
Base = automap_base()
# reflect the tables
Base.prepare(db.engine, reflect=True)

session = Session(db.engine)

Base.classes.keys()
# Save references to each table
Example #24
def create_new_spine_database(db_url, upgrade=True, for_spine_model=False):
    """Create a new Spine database at the given url."""
    try:
        engine = create_engine(db_url)
    except DatabaseError as e:
        raise SpineDBAPIError("Could not connect to '{}': {}".format(
            db_url, e.orig.args))
    # Drop existing tables. This is a Spine db now...
    meta = MetaData(engine)
    meta.reflect()
    meta.drop_all(engine)
    # Create new tables
    meta = MetaData(naming_convention=naming_convention)
    Table(
        "commit",
        meta,
        Column("id", Integer, primary_key=True),
        Column("comment", String(255), nullable=False),
        Column("date", DateTime, nullable=False),
        Column("user", String(45)),
    )
    object_class_category = Table(
        "object_class_category",
        meta,
        Column("id", Integer, primary_key=True),
        Column("name", String(255), nullable=False, unique=True),
        Column("description", String(255), server_default=null()),
        Column("commit_id", Integer, ForeignKey("commit.id")),
    )
    object_class = Table(
        "object_class",
        meta,
        Column("id", Integer, primary_key=True),
        Column("name", String(255), nullable=False, unique=True),
        Column("description", String(255), server_default=null()),
        Column("category_id",
               Integer,
               ForeignKey("object_class_category.id"),
               server_default=null()),
        Column("display_order", Integer, server_default="99"),
        Column("display_icon", BigInteger, server_default=null()),
        Column("hidden", Integer, server_default="0"),
        Column("commit_id", Integer, ForeignKey("commit.id")),
    )
    Table(
        "object_category",
        meta,
        Column("id", Integer, primary_key=True),
        Column("object_class_id", Integer, ForeignKey("object_class.id")),
        Column("name", String(255), nullable=False, unique=True),
        Column("description", String(255), server_default=null()),
        Column("commit_id", Integer, ForeignKey("commit.id")),
    )
    Table(
        "object",
        meta,
        Column("id", Integer, primary_key=True),
        Column(
            "class_id", Integer,
            ForeignKey("object_class.id",
                       onupdate="CASCADE",
                       ondelete="CASCADE")),
        Column("name", String(255), nullable=False, unique=True),
        Column("description", String(255), server_default=null()),
        Column("category_id", Integer, ForeignKey("object_category.id")),
        Column("commit_id", Integer, ForeignKey("commit.id")),
    )
    Table(
        "relationship_class",
        meta,
        Column("id", Integer, primary_key=True),
        Column("dimension", Integer, primary_key=True),
        Column("object_class_id", Integer, ForeignKey("object_class.id")),
        Column("name", String(255), nullable=False),
        Column("hidden", Integer, server_default="0"),
        Column("commit_id", Integer, ForeignKey("commit.id")),
        UniqueConstraint("dimension", "name"),
    )
    Table(
        "relationship",
        meta,
        Column("id", Integer, primary_key=True),
        Column("dimension", Integer, primary_key=True),
        Column("object_id", Integer, ForeignKey("object.id")),
        Column("class_id", Integer, nullable=False),
        Column("name", String(255), nullable=False),
        Column("commit_id", Integer, ForeignKey("commit.id")),
        UniqueConstraint("dimension", "name"),
        ForeignKeyConstraint(
            ("class_id", "dimension"),
            ("relationship_class.id", "relationship_class.dimension"),
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
    )
    Table(
        "parameter",
        meta,
        Column("id", Integer, primary_key=True),
        Column("name", String(155), nullable=False, unique=True),
        Column("description", String(155), server_default=null()),
        Column("data_type", String(155), server_default="NUMERIC"),
        Column("relationship_class_id", Integer, default=null()),
        Column(
            "object_class_id",
            Integer,
            ForeignKey("object_class.id",
                       onupdate="CASCADE",
                       ondelete="CASCADE"),
            server_default=null(),
        ),
        Column("can_have_time_series", Integer, server_default="0"),
        Column("can_have_time_pattern", Integer, server_default="1"),
        Column("can_be_stochastic", Integer, server_default="0"),
        Column("default_value", String(155), server_default="0"),
        Column("is_mandatory", Integer, server_default="0"),
        Column("precision", Integer, server_default="2"),
        Column("unit", String(155), server_default=null()),
        Column("minimum_value", Float, server_default=null()),
        Column("maximum_value", Float, server_default=null()),
        Column("commit_id", Integer, ForeignKey("commit.id")),
        CheckConstraint(
            "`relationship_class_id` IS NOT NULL OR `object_class_id` IS NOT NULL"
        ),
    )
    Table(
        "parameter_value",
        meta,
        Column("id", Integer, primary_key=True),
        Column(
            "parameter_id", Integer,
            ForeignKey("parameter.id", onupdate="CASCADE",
                       ondelete="CASCADE")),
        Column("relationship_id", Integer, server_default=null()),
        Column("dummy_relationship_dimension", Integer, server_default="0"),
        Column("object_id",
               Integer,
               ForeignKey("object.id", onupdate="CASCADE", ondelete="CASCADE"),
               server_default=null()),
        Column("index", Integer, server_default="1"),
        Column("value", String(155), server_default=null()),
        Column("json", String(255), server_default=null()),
        Column("expression", String(155), server_default=null()),
        Column("time_pattern", String(155), server_default=null()),
        Column("time_series_id", String(155), server_default=null()),
        Column("stochastic_model_id", String(155), server_default=null()),
        Column("commit_id", Integer, ForeignKey("commit.id")),
        CheckConstraint(
            "`relationship_id` IS NOT NULL OR `object_id` IS NOT NULL"),
        UniqueConstraint("parameter_id", "object_id"),
        UniqueConstraint("parameter_id", "relationship_id"),
        ForeignKeyConstraint(
            ("relationship_id", "dummy_relationship_dimension"),
            ("relationship.id", "relationship.dimension"),
            onupdate="CASCADE",
            ondelete="CASCADE",
        ),
    )
    try:
        meta.create_all(engine)
    except DatabaseError as e:
        raise SpineDBAPIError("Unable to create Spine database: {}".format(
            e.orig.args))
    if not upgrade:
        return engine
    is_head(db_url, upgrade=True)
    if not for_spine_model:
        return engine
    # Add specific data structure for Spine Model
    meta = MetaData(engine, reflect=True)
    object_class = meta.tables["object_class"]
    object_ = meta.tables["object"]
    relationship_class = meta.tables["relationship_class"]
    parameter_definition = meta.tables["parameter_definition"]
    parameter_tag = meta.tables["parameter_tag"]
    parameter_definition_tag = meta.tables["parameter_definition_tag"]
    obj_cls = lambda *x: dict(
        zip(("id", "name", "description", "display_order", "display_icon"), x))
    obj = lambda *x: dict(zip(("class_id", "name", "description"), x))
    rel_cls = lambda *x: dict(
        zip(("id", "dimension", "object_class_id", "name"), x))
    obj_par_def = lambda *x: dict(
        zip(("id", "name", "object_class_id", "default_value"), x))
    rel_par_def = lambda *x: dict(
        zip(("id", "name", "relationship_class_id", "default_value"), x))
    par_tag = lambda *x: dict(zip(("id", "tag", "description"), x))
    par_def_tag = lambda *x: dict(
        zip(("parameter_definition_id", "parameter_tag_id"), x))
    try:
        engine.execute(
            object_class.insert(),
            [
                obj_cls(1, "direction", "A flow direction", 1, 281105626296654,
                        0),
                obj_cls(
                    2, "unit",
                    "An entity where an energy conversion process takes place",
                    2, 281470681805429, 0),
                obj_cls(3, "connection",
                        "An entity where an energy transfer takes place", 3,
                        280378317271233, 0),
                obj_cls(4, "storage", "A storage", 4, 280376899531934, 0),
                obj_cls(5, "commodity", "A commodity", 5, 281473533932880, 0),
                obj_cls(6, "node",
                        "An entity where an energy balance takes place", 6,
                        280740554077951, 0),
                obj_cls(7, "temporal_block", "A temporal block", 7,
                        280376891207703, 0),
            ],
        )
        engine.execute(
            object_.insert(),
            [
                obj(1, "from_node", "From a node, into something else"),
                obj(1, "to_node", "Into a node, from something else"),
            ],
        )
        engine.execute(
            relationship_class.insert(),
            [
                rel_cls(1, 0, 2, "unit__node__direction__temporal_block"),
                rel_cls(1, 1, 6, "unit__node__direction__temporal_block"),
                rel_cls(1, 2, 1, "unit__node__direction__temporal_block"),
                rel_cls(1, 3, 7, "unit__node__direction__temporal_block"),
                rel_cls(2, 0, 3,
                        "connection__node__direction__temporal_block"),
                rel_cls(2, 1, 6,
                        "connection__node__direction__temporal_block"),
                rel_cls(2, 2, 1,
                        "connection__node__direction__temporal_block"),
                rel_cls(2, 3, 7,
                        "connection__node__direction__temporal_block"),
                rel_cls(3, 0, 6, "node__commodity"),
                rel_cls(3, 1, 5, "node__commodity"),
                rel_cls(4, 0, 2, "unit_group__unit"),
                rel_cls(4, 1, 2, "unit_group__unit"),
                rel_cls(5, 0, 5, "commodity_group__commodity"),
                rel_cls(5, 1, 5, "commodity_group__commodity"),
                rel_cls(6, 0, 6, "node_group__node"),
                rel_cls(6, 1, 6, "node_group__node"),
                rel_cls(7, 0, 2, "unit_group__commodity_group"),
                rel_cls(7, 1, 5, "unit_group__commodity_group"),
                rel_cls(8, 0, 5, "commodity_group__node_group"),
                rel_cls(8, 1, 6, "commodity_group__node_group"),
                rel_cls(9, 0, 2, "unit__commodity"),
                rel_cls(9, 1, 5, "unit__commodity"),
                rel_cls(10, 0, 2, "unit__commodity__direction"),
                rel_cls(10, 1, 5, "unit__commodity__direction"),
                rel_cls(10, 2, 1, "unit__commodity__direction"),
                rel_cls(11, 0, 2, "unit__commodity__commodity"),
                rel_cls(11, 1, 5, "unit__commodity__commodity"),
                rel_cls(11, 2, 5, "unit__commodity__commodity"),
                rel_cls(12, 0, 3, "connection__node__direction"),
                rel_cls(12, 1, 6, "connection__node__direction"),
                rel_cls(12, 2, 1, "connection__node__direction"),
                rel_cls(13, 0, 3, "connection__node__node"),
                rel_cls(13, 1, 6, "connection__node__node"),
                rel_cls(13, 2, 6, "connection__node__node"),
                rel_cls(14, 0, 6, "node__temporal_block"),
                rel_cls(14, 1, 7, "node__temporal_block"),
                rel_cls(15, 0, 4, "storage__unit"),
                rel_cls(15, 1, 2, "storage__unit"),
                rel_cls(16, 0, 4, "storage__connection"),
                rel_cls(16, 1, 3, "storage__connection"),
                rel_cls(17, 0, 4, "storage__commodity"),
                rel_cls(17, 1, 5, "storage__commodity"),
            ],
        )
        engine.execute(
            parameter_definition.insert(),
            [
                obj_par_def(1, "fom_cost", 2, "null"),
                obj_par_def(2, "start_up_cost", 2, "null"),
                obj_par_def(3, "shut_down_cost", 2, "null"),
                obj_par_def(4, "number_of_units", 2, 1),
                obj_par_def(5, "avail_factor", 2, 1),
                obj_par_def(6, "min_down_time", 2, 0),
                obj_par_def(7, "min_up_time", 2, 0),
                obj_par_def(8, "start_datetime", 7, "null"),
                obj_par_def(9, "end_datetime", 7, "null"),
                obj_par_def(10, "time_slice_duration", 7, "null"),
                obj_par_def(11, "demand", 6, 0),
                obj_par_def(12, "online_variable_type", 2,
                            '"integer_online_variable"'),
                obj_par_def(13, "fix_units_on", 2, "null"),
                obj_par_def(14, "stor_state_cap", 4, 0),
                obj_par_def(15, "frac_state_loss", 4, 0),
            ],
        )
        engine.execute(
            parameter_definition.insert(),
            [
                rel_par_def(1001, "unit_conv_cap_to_flow", 9, 1),
                rel_par_def(1002, "unit_capacity", 10, "null"),
                rel_par_def(1003, "operating_cost", 10, "null"),
                rel_par_def(1004, "vom_cost", 10, "null"),
                rel_par_def(1005, "tax_net_flow", 8, "null"),
                rel_par_def(1006, "tax_out_flow", 8, "null"),
                rel_par_def(1007, "tax_in_flow", 8, "null"),
                rel_par_def(1008, "fix_ratio_out_in", 11, "null"),
                rel_par_def(1009, "fix_ratio_out_in", 12, "null"),
                rel_par_def(1010, "max_ratio_out_in", 11, "null"),
                rel_par_def(1011, "max_ratio_out_in", 12, "null"),
                rel_par_def(1012, "min_ratio_out_in", 11, "null"),
                rel_par_def(1013, "min_ratio_out_in", 12, "null"),
                rel_par_def(1014, "minimum_operating_point", 9, "null"),
                rel_par_def(1017, "stor_unit_discharg_eff", 15, 1),
                rel_par_def(1018, "stor_unit_charg_eff", 15, 1),
                rel_par_def(1019, "stor_conn_discharg_eff", 16, 1),
                rel_par_def(1020, "stor_conn_charg_eff", 16, 1),
                rel_par_def(1021, "max_cum_in_flow_bound", 7, "null"),
                rel_par_def(1022, "fix_flow", 10, "null"),
                rel_par_def(1023, "fix_trans", 12, "null"),
            ],
        )
        engine.execute(
            parameter_tag.insert(),
            [
                par_tag(1, "duration", "duration in time"),
                par_tag(2, "date_time", "a specific point in time"),
                par_tag(3, "time_series", "time series data"),
                par_tag(4, "time_pattern", "time patterned data"),
            ],
        )
        engine.execute(
            parameter_definition_tag.insert(),
            [
                par_def_tag(11, 3),
                par_def_tag(10, 1),
                par_def_tag(8, 2),
                par_def_tag(9, 2)
            ],
        )
    except DatabaseError as e:
        raise SpineDBAPIError(
            "Unable to add specific data structure for Spine Model: {}".format(
                e.orig.args))
    return engine
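
A hedged usage sketch; the in-memory SQLite URL is an assumption for illustration, and upgrade=False makes the function return right after create_all, skipping the alembic head check:

engine = create_new_spine_database('sqlite://', upgrade=False)
meta = MetaData(engine)
meta.reflect()
print(sorted(meta.tables))  # commit, object, object_class, parameter, ...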
Example #25
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm.session import Session

from crate_anon.anonymise.config_singleton import config
from crate_anon.anonymise.constants import (
    MAX_TRID,
    TABLE_KWARGS,
    TridType,
)

if TYPE_CHECKING:
    from crate_anon.anonymise.scrub import PersonalizedScrubber

log = logging.getLogger(__name__)
admin_meta = MetaData()
AdminBase = declarative_base(metadata=admin_meta)


class PatientInfoConstants(object):
    SECRET_MAP_TABLENAME = 'secret_map'
    PID_FIELDNAME = "pid"
    MPID_FIELDNAME = "mpid"
    RID_FIELDNAME = "rid"
    MRID_FIELDNAME = "mrid"
    TRID_FIELDNAME = "trid"


class PatientInfo(AdminBase):
    """
    Represent patient information in the secret admin database.
Example #26
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.sql import func


convention = {
    'all_column_names': lambda constraint, table: '_'.join([
        column.name for column in constraint.columns.values()
    ]),
    'ix': 'ix__%(table_name)s__%(all_column_names)s',
    'uq': 'uq__%(table_name)s__%(all_column_names)s',
    'ck': 'ck__%(table_name)s__%(constraint_name)s',
    'fk': 'fk__%(table_name)s__%(all_column_names)s__%(referred_table_name)s',
    'pk': 'pk__%(table_name)s'
}

metadata = MetaData(naming_convention=convention)

names_table = Table(
    'names',
    metadata,
    Column('id', Integer, autoincrement=True, primary_key=True, unique=True),
    Column('name', String, nullable=False),
    Column(
        'local_uuid', UUID(as_uuid=True),
        primary_key=True, nullable=False, unique=True
        ),
    Column('date', DateTime(timezone=True), server_default=func.now()),
)
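
With the convention above, the unique=True flags should render as uq__names__id and uq__names__local_uuid (the all_column_names callable joins the constrained column names). A hedged way to verify without a live database, compiling against the PostgreSQL dialect since the table uses a UUID column:

from sqlalchemy.dialects import postgresql
from sqlalchemy.schema import CreateTable

print(CreateTable(names_table).compile(dialect=postgresql.dialect()))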

parse_company_table = Table(
    'parsed_company',
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from typing import Any

from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base

from airflow.configuration import conf

SQL_ALCHEMY_SCHEMA = conf.get("core", "SQL_ALCHEMY_SCHEMA")

metadata = (None if not SQL_ALCHEMY_SCHEMA or SQL_ALCHEMY_SCHEMA.isspace() else
            MetaData(schema=SQL_ALCHEMY_SCHEMA))
Base = declarative_base(metadata=metadata)  # type: Any
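
(When SQL_ALCHEMY_SCHEMA is empty, metadata stays None and declarative_base() simply creates its own schema-less MetaData, so Base.metadata is usable either way.)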

ID_LEN = 250


# used for typing
class Operator:
    pass


def get_id_collation_args():
    collation = conf.get('core', 'sql_engine_collation_for_ids', fallback=None)
    if collation:
        return {'collation': collation}
    else:
        return {}
Example #28
 def test_reflect_all(self, connection):
     m = MetaData()
     m.reflect(connection)
     eq_(set(t.name for t in m.tables.values()), set(["admin_docindex"]))
from sqlalchemy.exc import NoSuchTableError

# Need our custom types, but don't import anything else from model
from galaxy.model.custom_types import TrimmedString

log = logging.getLogger( __name__ )
log.setLevel(logging.DEBUG)
handler = logging.StreamHandler( sys.stdout )
format = "%(name)s %(levelname)s %(asctime)s %(message)s"
formatter = logging.Formatter( format )
handler.setFormatter( formatter )
log.addHandler( handler )

now = datetime.datetime.utcnow

metadata = MetaData()

SkipToolTest_table = Table( "skip_tool_test", metadata,
                            Column( "id", Integer, primary_key=True ),
                            Column( "create_time", DateTime, default=now ),
                            Column( "update_time", DateTime, default=now, onupdate=now ),
                            Column( "repository_metadata_id", Integer, ForeignKey( "repository_metadata.id" ), index=True ),
                            Column( "initial_changeset_revision", TrimmedString( 255 ), index=True ),
                            Column( "comment" , TEXT ) )


def upgrade( migrate_engine ):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    # Initialize.
Example #30
 def _selfref_fk_fixture(self, table_args=(), table_kwargs={}):
     m = MetaData()
     t = Table('tname', m, Column('id', Integer, primary_key=True),
               Column('parent_id', Integer, ForeignKey('tname.id')),
               Column('data', String))
     return ApplyBatchImpl(t, table_args, table_kwargs)