Code Example #1
def table_dml(engine):
    return Table('test_pybigquery.sample_dml',
                 MetaData(bind=engine),
                 autoload=True)
Code Example #2
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    datastore_versions = Table('datastore_versions', meta, autoload=True)
    # modify column
    datastore_versions.c.name.alter(unique=False)
Code Example #3
 def setUp(self):
     self.metadata = MetaData()
     self.declarative_base = declarative_base(metadata=self.metadata)
     self.SADatum = None
Code Example #4
    def __thd_clean_database(self, conn):
        # In general it's nearly impossible to do "bullet proof" database
        # cleanup with SQLAlchemy that will work on a range of databases
        # and their configurations.
        #
        # Following approaches were considered.
        #
        # 1. Drop Buildbot Model schema:
        #
        #     model.Model.metadata.drop_all(bind=conn, checkfirst=True)
        #
        # Dropping the schema from the model is a correct and working operation
        # only if the database schema exactly corresponds to the model schema.
        #
        # If it is not (e.g. migration script failed or migration results in
        # old version of model), then some tables outside model schema may be
        # present, which may reference tables in the model schema.
        # In this case either dropping the model schema will fail (if the
        # database enforces referential integrity, e.g. PostgreSQL), or
        # dropping the leftover tables in the code below will fail (if the
        # database allows removing tables that other tables still reference,
        # e.g. SQLite).
        #
        # 2. Introspect database contents and drop found tables.
        #
        #     meta = MetaData(bind=conn)
        #     meta.reflect()
        #     meta.drop_all()
        #
        # May fail if the schema contains reference cycles (and the Buildbot
        # schema has them). Reflection loses the metadata describing how
        # reference cycles can be broken (e.g. use_alter=True).
        # Introspection may also fail if the schema has invalid references
        # (e.g. possible in SQLite).
        #
        # 3. What is actually needed here is accurate code for each engine
        # and each engine configuration that will drop all tables,
        # indexes, constraints, etc in proper order or in a proper way
        # (using tables alternation, or DROP TABLE ... CASCADE, etc).
        #
        # Conclusion: use approach 2, manually tearing apart the known
        # reference cycles.

        # pylint: disable=too-many-nested-blocks

        try:
            meta = MetaData(bind=conn)

            # Reflect database contents. May fail, e.g. if table references
            # non-existent table in SQLite.
            meta.reflect()

            # Table.foreign_key_constraints introduced in SQLAlchemy 1.0.
            if sa_version()[:2] >= (1, 0):
                # Restore `use_alter` settings to break known reference cycles.
                # The main goal of this part is to silence the SQLAlchemy
                # warning about reference cycles.
                # It looks like it's OK to do this only with SQLAlchemy >= 1.0.0,
                # since the warning is not issued in SQLAlchemy == 0.8.0.

                # List of reference links (table_name, ref_table_name) that
                # should be broken by adding use_alter=True.
                table_referenced_table_links = [('buildsets', 'builds'),
                                                ('builds', 'buildrequests')]
                for table_name, ref_table_name in table_referenced_table_links:
                    if table_name in meta.tables:
                        table = meta.tables[table_name]
                        for fkc in table.foreign_key_constraints:
                            if fkc.referred_table.name == ref_table_name:
                                fkc.use_alter = True

            # Drop all reflected tables and indices. May fail, e.g. if
            # SQLAlchemy is unable to break circular references.
            # SQLAlchemy's foreign key support on SQLite is not yet perfect, so
            # we must deactivate foreign keys during this operation, even
            # though we did our best to use use_alter above.
            with withoutSqliteForeignKeys(conn.engine, conn):
                meta.drop_all()

        except Exception:
            # sometimes this goes badly wrong; being able to see the schema
            # can be a big help
            if conn.engine.dialect.name == 'sqlite':
                r = conn.execute("select sql from sqlite_master "
                                 "where type='table'")
                log.msg("Current schema:")
                for row in r.fetchall():
                    log.msg(row.sql)
            raise
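
The use_alter workaround referenced in the comments above is easier to see in isolation. Below is a minimal sketch with two hypothetical tables that reference each other; marking one ForeignKeyConstraint with use_alter=True makes SQLAlchemy emit that constraint as a separate ALTER TABLE, so create_all()/drop_all() can order the DDL despite the cycle:

from sqlalchemy import MetaData, Table, Column, Integer, ForeignKeyConstraint

meta = MetaData()
parent = Table('parent', meta,
               Column('id', Integer, primary_key=True),
               Column('child_id', Integer))
child = Table('child', meta,
              Column('id', Integer, primary_key=True),
              Column('parent_id', Integer),
              ForeignKeyConstraint(['parent_id'], ['parent.id']))
# Break the parent <-> child cycle: this constraint is emitted via ALTER TABLE.
parent.append_constraint(
    ForeignKeyConstraint(['child_id'], ['child.id'], use_alter=True))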
Code Example #5
 def reflect(self, meta=None):
     """Reflects tables from the database. """
     meta = meta or MetaData()
     meta.reflect(bind=self.engine)
     return meta
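
A brief usage note for the helper above (db is a hypothetical instance of the surrounding class): the returned MetaData exposes the reflected tables through its .tables mapping.

meta = db.reflect()                       # hypothetical instance of the class above
users = meta.tables['users']              # assumes a 'users' table exists in the database
print([column.name for column in users.columns])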
Code Example #6
File: db.py Project: xuexianwu/ugrid-tools
from sqlalchemy.ext.declarative.api import declarative_base
from sqlalchemy.orm import relationship, backref
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm.session import sessionmaker
from sqlalchemy.schema import MetaData, Column
from sqlalchemy.types import Integer, String
from sqlalchemy import create_engine  # required for create_engine(connstr) below

# connstr = 'sqlite://'
# connstr = 'postgresql://*****:*****@localhost/<database>'
# connstr = 'postgresql://{user}:{password}@{host}/{database}'
## four slashes for absolute paths - three for relative
db_path = '/home/benkoziol/l/project/pmesh/src/analysis/nodes.sqlite'
connstr = 'sqlite:///{0}'.format(db_path)

engine = create_engine(connstr)
metadata = MetaData(bind=engine)
Base = declarative_base(metadata=metadata)
Session = sessionmaker(bind=engine)


class Shapefile(Base):
    __tablename__ = 'shapefile'
    sid = Column(Integer, primary_key=True)
    fullpath = Column(String, unique=True, nullable=False)
    key = Column(String, nullable=False)
    catchment = relationship("Catchment", backref=backref("shapefile", uselist=False))

    def get_area(self):
        """
        :returns: Area in square meters.
        :rtype: float
Code Example #7
File: test_operators.py Project: yoloseem/sqlalchemy
 def test_alias_proxy(self):
     t = Table('t', MetaData(), Column('foo', self._add_override_factory()))
     proxied = t.alias().c.foo
     self._assert_add_override(proxied)
Code Example #8
File: models.py Project: violet4/network_tracker
#!/usr/bin/env python
from sqlalchemy import (BigInteger, Text, Integer, DateTime, ForeignKey,
                        Column)

from sqlalchemy.ext.declarative import declarative_base

from sqlalchemy.schema import MetaData
from sqlalchemy.orm import sessionmaker, relationship

from network_tracker_config import engine, network_schema_name

SABase = declarative_base(metadata=MetaData(
    bind=engine,
    schema=network_schema_name,
), )


class Helper:
    @classmethod
    def get_row(cls, column, value):
        sess = cls.get_sess()
        query = sess.query(cls).filter(column == value)

        # if it exists, then return it
        row = query.one_or_none()
        if row is not None:
            return row

        # otherwise, create one
        row = cls()
        setattr(row, str(column).split('.')[-1], value)
Code Example #9
File: env.py Project: mahjong/ziggurat_foundations
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata

target_metadata = MetaData(
    naming_convention={
        "ix": "ix_%(column_0_label)s",
        "uq": "uq_%(table_name)s_%(column_0_name)s",
        "ck": "ck_%(table_name)s_%(constraint_name)s",
        "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        "pk": "pk_%(table_name)s",
    })


def get_url():
    url = os.getenv("DB_URL", "")
    if url == "":
        url = config.get_main_option("sqlalchemy.url")

    return url


# target_metadata = None
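
For context, get_url() and target_metadata are typically consumed by the online migration runner defined later in the same env.py. A minimal sketch, assuming the standard Alembic env.py imports (engine_from_config and pool from sqlalchemy, context from alembic) that are not shown in the excerpt above:

def run_migrations_online():
    configuration = config.get_section(config.config_ini_section)
    configuration["sqlalchemy.url"] = get_url()
    connectable = engine_from_config(
        configuration, prefix="sqlalchemy.", poolclass=pool.NullPool)

    with connectable.connect() as connection:
        # Pass the naming-convention-aware MetaData so autogenerate can
        # compare the models against the live database schema.
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()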
Code Example #10
 def test_has_table(self, engine, connection):
     self.assertTrue(Table('one_row', MetaData(bind=engine)).exists())
     self.assertFalse(
         Table('this_table_does_not_exist', MetaData(bind=engine)).exists())
Code Example #11
 def __init__(self, dsn):
     engine = create_engine(dsn)
     self._metadata = MetaData(engine)
Code Example #12
def test_querying_wildcard_tables(engine):
    table = Table('bigquery-public-data.noaa_gsod.gsod*',
                  MetaData(bind=engine),
                  autoload=True)
    rows = table.select().limit(1).execute().first()
    assert len(rows) > 0
Code Example #13
def test_reflect_dataset_does_not_exist(engine):
    with pytest.raises(NoSuchTableError):
        Table('dataset_does_not_exist.table_does_not_exist',
              MetaData(bind=engine),
              autoload=True)
Code Example #14
def test_reflect_select_shared_table(engine):
    one_row = Table('bigquery-public-data.samples.natality',
                    MetaData(bind=engine),
                    autoload=True)
    row = one_row.select().limit(1).execute().first()
    assert len(row) >= 1
Code Example #15
def client_table():
    client_table = Table(u"client", MetaData(), Column(u"id", Integer()),
                         Column(u"name", String(50)))
    client_table.foreign_keys = []
    return client_table
Code Example #16
File: __init__.py Project: procool/itstructure
class __Database(object):
    __metaclass__ = Singleton
    _binds = {}
    _connections = {}
    _db_alias = getattr(settings, "DEFAULT_DB_ALIAS", 'database')
    _metadata = MetaData()
    _metadata_loaded = False

    ## name of module for SA model (default: sa_models.py):
    _sa_models_module_name = getattr(
        settings,
        "SQLALCHEMY_MODELS_MODULE_NAME",
        "sa_models"
    )


    def _get_metadata(self):
        return self.__class__._metadata


    def _set_active_db(self, db_alias):
        self.__class__._db_alias = db_alias

    def _get_engine(self, echo=False, bind_key=None, access=None):
        db_cfg = getattr(settings, 'SA_DATABASES', None)
        if db_cfg is None:
            raise RuntimeError("Please, specify database connection settings")

        if isinstance(access, str):
            access = list(access)
            

        ## No bind_key, try to get it by access:
        if bind_key is None and isinstance(access, (list, tuple)):
            for db_ident, db_params in db_cfg.iteritems():
                for acc_ in access:
                    if acc_ in db_params[1] or 'a' in db_params[1]:
                        bind_key = db_ident
                        break
                if bind_key is not None:
                    break

        db_alias = bind_key or self.__class__._db_alias
        if db_alias in self.__class__._binds:
            return self.__class__._binds[db_alias][0]
        if db_alias not in db_cfg:
            raise RuntimeError("Unknown DB alias '%s'" % db_alias)

        db = db_cfg[db_alias][0]
        access = db_cfg[db_alias][1]
        log.debug(u"Configure engine to %s", db)
        if db.startswith('sqlite'):
            engine = create_engine(db, echo=echo)
        else:
            proxy = TimingProxy()

            pool_size = int(os.environ.get('DB_POOL_SIZE', 5))
            if pool_size == 0:
                return None
            engine = create_engine(
                db,
                echo=echo,
                max_overflow=0,
                pool_size=pool_size,
                poolclass=GreenQueuePool,
                proxy=proxy,
                pool_timeout=5 #if not getattr(settings, "PRODUCTION") else 30,
            )

        self.__class__._binds[db_alias] = [engine, access,]
        return engine


    def _get_db_session(self, bind_key=None, access=None, echo=True, **kwargs):
        db_alias = bind_key or self.__class__._db_alias
        if db_alias not in _database._connections:
            engine = self._get_engine(bind_key=bind_key, echo=echo, access=access)
            self.__class__._connections[db_alias] = scoped_session(sessionmaker(
                bind=engine,
                class_=DBSession,
                expire_on_commit=False,
            ))

        connections_ = self.__class__._connections[db_alias]

        session = connections_(**kwargs)
        log.debug(u"DB<%s>: initiated as %s <%s>", id(session), db_alias, session.bind.pool.status())
        return session


    @staticmethod
    def __get_models_path(dirname):
        for path in sys.path:
            p = "/".join((path, dirname))
            if os.path.exists(p):
                return p

    @classmethod
    def __load_models_by_path(cls, dirpath):
        path_ = cls.__get_models_path(dirpath)
        if path_ is None:
            raise StopIteration()
        sys.path.append(path_)

        for name in os.listdir(path_):
            fpath = os.path.abspath("/".join((path_, name)))
            fstat = os.stat(fpath)

            ## Not a model directory, skip:
            if stat.S_ISREG(fstat.st_mode):
                continue

            ## Generate name of module to load for _load_models:
            yield name



    def _load_models(self, modules=None, dirpath=None):
        if modules is None and dirpath is not None:
            modules = self.__load_models_by_path(dirpath)
        if modules is None:
            modules = getattr(settings, "INSTALLED_APPS", ())
        apps = []
        for app in modules:
            module = __import__(app, fromlist=[self._sa_models_module_name])
            if hasattr(module, self._sa_models_module_name):
                apps.append(module)
        return apps
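
The shape of the SA_DATABASES setting consumed by _get_engine() is implied but never shown. A hypothetical sketch, assuming each alias maps to a (connection URL, access flags) tuple where the character 'a' grants all access:

# Hypothetical settings entry; names mirror the lookups in _get_engine():
# db_cfg[alias][0] is the connection URL, db_cfg[alias][1] the access string.
SA_DATABASES = {
    'database': ('postgresql://user:password@localhost/main', 'a'),  # default alias, all access
    'reports': ('postgresql://user:password@replica/main', 'r'),     # read-only flag
}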
Code Example #17
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    instances = Table('instances', meta, autoload=True)
    instances.drop_column(COLUMN_NAME)
Code Example #18
 def test_lots_of_types(self, engine, connection):
     # Presto doesn't have raw CREATE TABLE support, so we only test Hive.
     # Take the type list from sqlalchemy.types.
     types = [
         'INT',
         'CHAR',
         'VARCHAR',
         'NCHAR',
         'TEXT',
         'Text',
         'FLOAT',
         'NUMERIC',
         'DECIMAL',
         'TIMESTAMP',
         'DATETIME',
         'CLOB',
         'BLOB',
         'BOOLEAN',
         'SMALLINT',
         'DATE',
         'TIME',
         'String',
         'Integer',
         'SmallInteger',
         'Numeric',
         'Float',
         'DateTime',
         'Date',
         'Time',
         'Binary',
         'Boolean',
         'Unicode',
         'UnicodeText',
     ]
     cols = []
     for i, t in enumerate(types):
         cols.append(Column(str(i), getattr(sqlalchemy.types, t)))
     cols.append(Column('hive_date', HiveDate))
     cols.append(Column('hive_decimal', HiveDecimal))
     cols.append(Column('hive_timestamp', HiveTimestamp))
     table = Table('test_table',
                   MetaData(bind=engine),
                   *cols,
                   schema='pyhive_test_database')
     table.drop(checkfirst=True)
     table.create()
     connection.execute('SET mapred.job.tracker=local')
     connection.execute('USE pyhive_test_database')
     big_number = 10**10 - 1
     connection.execute(
         """
     INSERT OVERWRITE TABLE test_table
     SELECT
         1, "a", "a", "a", "a", "a", 0.1,
         0.1, 0.1, 0, 0, "a", "a",
         false, 1, 0, 0,
         "a", 1, 1,
         0.1, 0.1, 0, 0, 0, "a",
         false, "a", "a",
         0, %d, 123 + 2000
     FROM default.one_row
     """, big_number)
     row = connection.execute(table.select()).fetchone()
     self.assertEqual(row.hive_date, datetime.date(1970, 1, 1))
     self.assertEqual(row.hive_decimal, decimal.Decimal(big_number))
     self.assertEqual(row.hive_timestamp,
                      datetime.datetime(1970, 1, 1, 0, 0, 2, 123))
     table.drop()
Code Example #19
from sqlalchemy.types import PickleType, String
from zope.sqlalchemy import register as register_transaction_listener

from .constants import (
    ASSET_TYPE_BY_ID,
    RECORD_ID_LENGTH,
    RECORD_RETRY_COUNT)
from .exceptions import DatabaseRecordError
from .macros.security import make_random_string


CLASS_REGISTRY = {}
metadata = MetaData(naming_convention={
    'ix': 'ix_%(column_0_label)s',
    'uq': 'uq_%(table_name)s_%(column_0_name)s',
    'ck': 'ck_%(table_name)s_%(constraint_name)s',
    'fk': 'fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s',
    'pk': 'pk_%(table_name)s',
})
Base = declarative_base(class_registry=CLASS_REGISTRY, metadata=metadata)
asset_content = Table(
    'asset_content', Base.metadata,
    Column('parent_asset_id', String, ForeignKey('asset.id')),
    Column('child_asset_id', String, ForeignKey('asset.id')))
asset_connection = Table(
    'asset_connection', Base.metadata,
    Column('left_asset_id', String, ForeignKey('asset.id')),
    Column('right_asset_id', String, ForeignKey('asset.id')))


class RecordMixin(object):
Code Example #20
 def bindEngine(self, engine, schema_name=None):
     self._metadata = MetaData(bind=engine, schema=schema_name)
     self._clear()
Code Example #21
from flask_sqlalchemy import camel_to_snake_case
from requests.exceptions import HTTPError
from sqlalchemy import Column, Table
from sqlalchemy.dialects.postgresql import ARRAY, JSONB
from sqlalchemy.ext.declarative import as_declarative, declared_attr
from sqlalchemy.orm import relationship
from sqlalchemy.schema import MetaData, ForeignKey
from sqlalchemy.types import Boolean, Float, Text, BigInteger, DateTime
import stripe

from pipet.utils import PipetBase

STRIPE_API_VERSION = '2018-02-28'
SCHEMANAME = 'stripe'
CLASS_REGISTRY = {}
metadata = MetaData(schema=SCHEMANAME)


class EmptyResponse(Exception):
    pass


@as_declarative(metadata=metadata, class_registry=CLASS_REGISTRY)
class Base(PipetBase):
    id = Column(Text, primary_key=True)

    @classmethod
    def object_type(cls):
        return camel_to_snake_case(cls.__name__)

    @classmethod
Code Example #22
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_items_table(meta)]
    create_tables(tables)
Code Example #23
    def __init__(self, db_path):
        self.db_engine = create_engine(db_path, poolclass=StaticPool)
        self.db_factory = sessionmaker(bind=self.db_engine)
        self.db_session = scoped_session(self.db_factory)
        self.db_metadata = MetaData()
        self.db_base = declarative_base(
            metadata=self.db_metadata, bind=self.db_engine)

        # Set the global objects so that they're used by the plugins
        database.metadata = self.db_metadata
        database.base = self.db_base

        class DBComment(self.db_base):
            __tablename__ = "comments"

            author = Column(String)
            author_flair_css_class = Column(String)
            author_flair_text = Column(String)
            body = Column(String)
            controversiality = Column(Integer)
            created_utc = Column(DateTime)
            distinguished = Column(String)
            downs = Column(Integer)
            gilded = Column(Integer)
            id = Column(String, primary_key=True)
            link_id = Column(String)
            parent_id = Column(String)
            retrieved_on = Column(DateTime)
            score = Column(Integer)
            subreddit = Column(String)
            subreddit_id = Column(String)
            ups = Column(Integer)

            def __init__(self, comment):
                self.author = comment.author.name
                self.author_flair_css_class = comment.author_flair_css_class
                self.author_flair_text = comment.author_flair_text
                self.body = comment.body
                self.controversiality = comment.controversiality
                self.created_utc = datetime.datetime.utcfromtimestamp(
                    comment.created_utc)
                self.distinguished = comment.distinguished
                self.downs = comment.downs
                self.gilded = comment.gilded
                self.id = comment.id
                self.link_id = comment.link_id
                self.parent_id = comment.parent_id
                self.retrieved_on = datetime.datetime.utcnow()
                self.score = comment.score
                self.subreddit = comment.subreddit.name
                self.subreddit_id = comment.subreddit_id
                self.ups = comment.ups

        class DBSubmission(self.db_base):
            __tablename__ = "submissions"

            archived = Column(Boolean)
            author = Column(String)
            author_flair_css_class = Column(String)
            author_flair_text = Column(String)
            created_utc = Column(DateTime)
            distinguished = Column(String)
            domain = Column(String)
            downs = Column(Integer)
            gilded = Column(Integer)
            id = Column(String, primary_key=True)
            is_self = Column(Boolean)
            hide_score = Column(Boolean)
            link_flair_css_class = Column(String)
            link_flair_text = Column(String)
            num_comments = Column(Integer)
            over_18 = Column(Boolean)
            permalink = Column(String)
            quarantine = Column(Boolean)
            retrieved_on = Column(DateTime)
            score = Column(Integer)
            selftext = Column(String)
            stickied = Column(Boolean)
            subreddit = Column(String)
            subreddit_id = Column(String)
            title = Column(String)
            thumbnail = Column(String)
            url = Column(String)
            ups = Column(Integer)

            def __init__(self, sub):
                self.archived = sub.archived
                self.author = sub.author.name
                self.author_flair_css_class = sub.author_flair_css_class
                self.author_flair_text = sub.author_flair_text
                self.created_utc = datetime.datetime.utcfromtimestamp(
                    sub.created_utc)
                self.distinguished = sub.distinguished
                self.domain = sub.domain
                self.downs = sub.downs
                self.gilded = sub.gilded
                self.id = sub.id
                self.is_self = sub.is_self
                self.hide_score = sub.hide_score
                self.link_flair_css_class = sub.link_flair_css_class
                self.link_flair_text = sub.link_flair_text
                self.num_comments = sub.num_comments
                self.over_18 = sub.over_18
                self.permalink = sub.permalink
                self.quarantine = sub.quarantine
                self.retrieved_on = datetime.datetime.utcnow()
                self.score = sub.score
                self.selftext = sub.selftext
                self.stickied = sub.stickied
                self.subreddit = sub.subreddit.name
                self.subreddit_id = sub.subreddit_id
                self.title = sub.title
                self.thumbnail = sub.thumbnail
                self.url = sub.url
                self.ups = sub.ups

        class DBScheduledEvents(self.db_base):
            __tablename__ = "scheduled_events"
            id = Column(String, primary_key=True)
            file = Column(String)
            func = Column(String)
            args = Column(JSONEncodedDict)
            kwargs = Column(JSONEncodedDict)
            trigger_time = Column(DateTime)
            scheduled_time = Column(DateTime)

            def __init__(self, file, func, args, kwargs, trigger_time):
                self.file = file
                self.func = func
                self.args = args
                self.kwargs = kwargs
                self.trigger_time = trigger_time
                self.scheduled_time = datetime.datetime.utcnow()
                self.id = str(uuid.uuid4())

        self.DBComment = DBComment
        self.DBSubmission = DBSubmission
        self.DBScheduledEvents = DBScheduledEvents

        # Create built-in tables
        self.create_reddit_tables()
Code Example #24
def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine
    tables = [define_items_table(meta)]
    drop_tables(tables)
Code Example #25
File: __init__.py Project: ClassesOver/Stories
    
    def to_dict(self):
        data = {
            'id': self.hash_id,
        }
        return data


naming_convention = {
    "ix": 'ix_%(column_0_label)s',
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(column_0_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
}
db = SQLAlchemy(metadata=MetaData(naming_convention=naming_convention), model_class=_Model)
migrate = Migrate()
login = LoginManager()
mail = Mail()
bootstrap = Bootstrap()
moment = Moment()
babel = Babel()


def create_app(config_class=Config):
    app = Flask(__name__, static_folder='../../frontend/build', static_url_path='/')
    app.config.from_object(config_class)
    
    db.init_app(app)
    migrate.init_app(app, db, render_as_batch=True)
    login.init_app(app)
Code Example #26
def declarative_base(bind=None,
                     metadata=None,
                     mapper=None,
                     cls=object,
                     name='Base',
                     constructor=_declarative_constructor,
                     metaclass=DeclarativeMeta,
                     engine=None):
    """Construct a base class for declarative class definitions.

    The new base class will be given a metaclass that invokes
    :func:`instrument_declarative()` upon each subclass definition, and routes
    later Column- and Mapper-related attribute assignments made on the class
    into Table and Mapper assignments.

    :param bind: An optional :class:`~sqlalchemy.engine.base.Connectable`, will be assigned
      the ``bind`` attribute on the :class:`~sqlalchemy.MetaData` instance.
      The `engine` keyword argument is a deprecated synonym for `bind`.

    :param metadata:
      An optional :class:`~sqlalchemy.MetaData` instance.  All :class:`~sqlalchemy.schema.Table`
      objects implicitly declared by
      subclasses of the base will share this MetaData.  A MetaData instance
      will be created if none is provided.  The MetaData instance will be
      available via the `metadata` attribute of the generated declarative
      base class.

    :param mapper:
      An optional callable, defaults to :func:`~sqlalchemy.orm.mapper`.  Will be
      used to map subclasses to their Tables.

    :param cls:
      Defaults to :class:`object`.  A type to use as the base for the generated
      declarative base class.  May be a type or tuple of types.

    :param name:
      Defaults to ``Base``.  The display name for the generated
      class.  Customizing this is not required, but can improve clarity in
      tracebacks and debugging.

    :param constructor:
      Defaults to declarative._declarative_constructor, an __init__
      implementation that assigns \**kwargs for declared fields and relations
      to an instance.  If ``None`` is supplied, no __init__ will be installed
      and construction will fall back to cls.__init__ with normal Python
      semantics.

    :param metaclass:
      Defaults to :class:`DeclarativeMeta`.  A metaclass or __metaclass__
      compatible callable to use as the meta type of the generated
      declarative base class.

    """
    lcl_metadata = metadata or MetaData()
    if bind or engine:
        lcl_metadata.bind = bind or engine

    bases = not isinstance(cls, tuple) and (cls, ) or cls
    class_dict = dict(_decl_class_registry=dict(), metadata=lcl_metadata)

    if constructor:
        class_dict['__init__'] = constructor
    if mapper:
        class_dict['__mapper_cls__'] = mapper

    return metaclass(name, bases, class_dict)
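
A minimal usage sketch of the factory above (assuming Column, Integer, and String are imported from sqlalchemy):

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

# Every Table implicitly declared by a subclass is collected on Base.metadata.
assert 'users' in Base.metadata.tables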
Code Example #27
# -*- coding: utf-8 -*-
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.schema import MetaData

# Recommended naming convention used by Alembic, as various different database
# providers will autogenerate vastly different names making migrations more
# difficult. See: http://alembic.zzzcomputing.com/en/latest/naming.html
NAMING_CONVENTION = {
    "ix": "ix_%(column_0_label)s",
    "uq": "uq__%(table_name)s__%(column_0_name)s",  # Unique constrains
    # TODO - G.M - 28-03-2018 - [Database] Convert database to allow naming convention
    # for ck contraint.
    # "ck": "ck_%(table_name)s_%(constraint_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s",
}

metadata = MetaData(naming_convention=NAMING_CONVENTION)
DeclarativeBase = declarative_base(metadata=metadata)
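
A minimal sketch (hypothetical model) of what the convention buys you: constraints get deterministic names instead of backend-generated ones, so Alembic migrations can reference them reliably.

from sqlalchemy import Column, Integer, String

class User(DeclarativeBase):
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    email = Column(String(255), unique=True)

# With the NAMING_CONVENTION above, the emitted DDL names these constraints
# "pk_user" and "uq__user__email" on every backend.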
Code Example #28
File: db.py Project: WhiteJoker/SofaSpud
    def __init__(self, dbFileName="sofaspud.db"):

        print "Database is being initialized"

        self.dbFileName = dbFileName
        self.dbPath = os.path.join(app.DATA_DIR, self.dbFileName)
        self.engine = create_engine("sqlite:///%s" % self.dbPath)
        self.metadata = MetaData(self.engine)
        self.session = scoped_session(
            sessionmaker(bind=self.engine, autocommit=True))

        # DB exists, do upgrade
        if os.path.isfile(self.dbPath):
            self.doUpgrade = True
        else:
            self.doUpgrade = False

        # DB VERSION
        latestDatabaseVersion = 1

        dbVersionTable = Table('DbVersion', self.metadata,
                               Column('version', Integer, primary_key=True))

        movieTable = Table(
            'Movie', self.metadata, Column('id', Integer, primary_key=True),
            Column('dateAdded', DateTime(), default=datetime.datetime.utcnow),
            Column('dateChanged', DateTime(),
                   default=datetime.datetime.utcnow), Column('name', String()),
            Column('year', Integer), Column('status', String()),
            Column('movieDb', String()))

        serieTable = Table(
            'Serie',
            self.metadata,
            Column('id', Integer, primary_key=True),
            Column('dateAdded', DateTime(), default=datetime.datetime.utcnow),
            Column('dateChanged', DateTime(),
                   default=datetime.datetime.utcnow),
            Column('tvDb', String()),
            Column('name', String()),
            Column('overview', Text()),
            Column('network', String()),
            Column('genre', String()),
            Column('runtime', String()),
            Column('airing', String()),
            Column('startYear', String()),
            Column('language', String()),
            Column('status', String()),
        )

        episodeTable = Table(
            'Episode',
            self.metadata,
            Column('id', Integer, primary_key=True),
            Column('dateAdded', DateTime(), default=datetime.datetime.utcnow),
            Column('dateChanged', DateTime(),
                   default=datetime.datetime.utcnow),
            Column('serieid', Integer, ForeignKey('Serie.id')),
            Column('tvDb', String()),
            Column('name', String()),
            Column('season', Integer),
            Column('episode', Integer),
            Column('description', Text()),
            Column('airDate', DateTime()),
            Column('status', String()),
        )

        # Mappers
        versionMapper = mapper(DbVersion, dbVersionTable)
        movieMapper = mapper(Movie, movieTable)
        serieMapper = mapper(Serie,
                             serieTable,
                             properties={'episode': relation(Episode)})
        episodeMapper = mapper(Episode, episodeTable)

        self.metadata.create_all()

        if self.doUpgrade:
            upgradeDb()
        else:
            for nr in range(1, latestDatabaseVersion + 1):
                self.session.add(DbVersion(nr))
Code Example #29
DB_PASSWORD = os.getenv("DB_PASSWORD")
DB_HOST = os.getenv("DB_HOST")
DB_PORT = os.getenv("DB_PORT")
DB_NAME = os.getenv("DB_NAME")

connection_string = 'postgresql+psycopg2://{usr}:{pwd}@{host}/air_quality_prod'\
    .format(usr=DB_USERNAME, pwd=DB_PASSWORD, host=DB_HOST)

engine = create_engine(connection_string, echo=False)

Session = sessionmaker(bind=engine, expire_on_commit=False)

Base = declarative_base()

session = Session()
meta = MetaData()
meta.reflect(bind=engine)

connection = psycopg2.connect(user=DB_USERNAME,
                              password=DB_PASSWORD,
                              host=DB_HOST,
                              port=DB_PORT,
                              database=DB_NAME)
cursor = connection.cursor()

# mongo db
MONGO_DB_USERNAME = os.getenv("MONGO_DB_USERNAME")
MONGO_DB_PASSWORD = os.getenv("MONGO_DB_PASSWORD")
MONGO_DB_PORT = os.getenv("MONGO_DB_PORT")
MONGO_DB_NAME = os.getenv("MONGO_DB_NAME")
MONGO_CONN_URI = f'mongodb://{MONGO_DB_USERNAME}:{MONGO_DB_PASSWORD}@{DB_HOST}:{MONGO_DB_PORT}/{MONGO_DB_NAME}'
Code Example #30
def table_using_test_dataset(engine_using_test_dataset):
    return Table('sample',
                 MetaData(bind=engine_using_test_dataset),
                 autoload=True)