def MakeClasses(metadata):
    """Declare the GBID tables against *metadata* and return the classes."""
    Base = declarative_base(metadata=metadata)

    class GBID(Base):
        # A named GenBank identifier with a byte offset into its source file.
        __tablename__ = 'gbid'
        __table_args__ = (UniqueConstraint('name'), {})

        id = Column(Integer, primary_key=True)
        name = Column(String, index=True)
        offset = Column(Integer)

        def __init__(self, name, offset):
            self.name = name
            self.offset = offset

    sgjBase = declarative_base(metadata=metadata)

    class GBID_SequenceJoin(sgjBase):
        # Join table linking a sequence row to its GBID row.
        __tablename__ = 'gbid_sequencejoin'
        __table_args__ = (UniqueConstraint('sequence'), {})

        id = Column(Integer, primary_key=True)
        sequence = Column(Integer, ForeignKey('sequence.id'), index=True)
        gbid = Column(Integer, ForeignKey('gbid.id'))

        def __init__(self, gbid, sequence):
            self.gbid = gbid
            self.sequence = sequence

    return {'GBID': GBID, 'GBID_SequenceJoin': GBID_SequenceJoin}
def __init__(self, config):
    """Set up declarative bases and empty engine/session placeholders.

    The engines and sessionmakers created here back `get_session`,
    `get_mw_session` and `get_ca_session`; they are populated lazily.
    """
    self.config = config

    # One declarative base per database family.
    self.WikimetricsBase = declarative_base(cls=SerializableBase)
    self.MediawikiBase = declarative_base(cls=SerializableBase)
    self.CentralAuthBase = declarative_base(cls=SerializableBase)

    # Wikimetrics connection state, filled in on first use.
    self.wikimetrics_engine = None
    self.wikimetrics_sessionmaker = None
    self.wikimetrics_session = None

    # One engine/session per mediawiki project.
    self.mediawiki_engines = {}
    self.mediawiki_sessions = {}

    # CentralAuth connection state, filled in on first use.
    self.centralauth_engine = None
    self.centralauth_sessionmaker = None
    self.centralauth_session = None

    # we instantiate project_host_map lazily
    self._project_host_map = None
def __init__(self, uri='sqlite://', app=None, echo=False, pool_size=None,
             pool_timeout=None, pool_recycle=None, convert_unicode=True,
             query_cls=BaseQuery):
    """Create the database wrapper, its scoped session and model bases."""
    self.uri = uri
    self.info = make_url(uri)
    # Normalize engine options; _cleanup_options drops unset values.
    self.options = self._cleanup_options(
        echo=echo,
        pool_size=pool_size,
        pool_timeout=pool_timeout,
        pool_recycle=pool_recycle,
        convert_unicode=convert_unicode,
    )
    self.connector = None
    self._engine_lock = threading.Lock()
    self.session = _create_scoped_session(self, query_cls=query_cls)

    # Two declarative bases; both get a back-reference to this wrapper and
    # a session-bound query attribute.
    self.Model = declarative_base(cls=Model, name='Model')
    self.BaseModel = declarative_base(cls=BaseModel, name='BaseModel')
    for base in (self.Model, self.BaseModel):
        base.db = self
        base.query = self.session.query

    if app is not None:
        self.init_app(app)
    _include_sqlalchemy(self)
def MakeClasses(metadata):
    """Declare the Alignment tables against *metadata* and return the classes."""
    Base = declarative_base(metadata=metadata)

    class Alignment(Base):
        # A named alignment with its source description.
        __tablename__ = 'alignment'
        __table_args__ = (UniqueConstraint('name'), {})

        id = Column(Integer, primary_key=True)
        source = Column(String)
        name = Column(String)

        def __init__(self, source='None'):
            self.source = source

    asjBase = declarative_base(metadata=metadata)

    class Alignment_SequenceJoin(asjBase):
        # Join table linking a sequence row to its alignment row.
        __tablename__ = 'alignment_sequencejoin'
        __table_args__ = (UniqueConstraint('sequence'), {})

        id = Column(Integer, primary_key=True)
        alignment = Column(Integer, ForeignKey('alignment.id'))
        sequence = Column(Integer, ForeignKey('sequence.id'))

        def __init__(self, alignment, sequence):
            self.alignment = alignment
            self.sequence = sequence

    return {'Alignment': Alignment,
            'Alignment_SequenceJoin': Alignment_SequenceJoin}
def db_connect(engine, schema=None, clobber=False):
    """Prepare the database tables on *engine* and return the declarative base.

    If *schema* is given, attempt to create it (a ProgrammingError from an
    already-existing schema is logged and ignored) and bind the base's
    metadata to it.  Existing tables are dropped first when *clobber* is
    true.

    Returns the declarative base whose tables were created (not the engine).
    """
    if schema is None:
        base = declarative_base()
    else:
        try:
            engine.execute(sqlalchemy.schema.CreateSchema(schema))
        except sqlalchemy.exc.ProgrammingError as err:
            # Most likely the schema already exists; log and carry on.
            # FIX: logging.warn is a deprecated alias of logging.warning.
            logging.warning(err)
        base = declarative_base(metadata=MetaData(schema=schema))
    define_schema(base)
    if clobber:
        logging.info('Clobbering database tables')
        base.metadata.drop_all(bind=engine)
    logging.info('Creating database tables')
    base.metadata.create_all(bind=engine)
    return base
def test_custom_mapper(self):
    # Verify that declarative classes honor a user-supplied mapper, whether
    # given per-class via __mapper_cls__ or base-wide via
    # declarative_base(mapper=...).
    class MyExt(sa.orm.MapperExtension):
        def create_instance(self):
            return "CHECK"

    # Plain function wrapper that injects the extension.
    def mymapper(cls, tbl, **kwargs):
        kwargs['extension'] = MyExt()
        return sa.orm.mapper(cls, tbl, **kwargs)

    from sqlalchemy.orm.mapper import Mapper

    # Mapper subclass that injects the extension.
    class MyMapper(Mapper):
        def __init__(self, *args, **kwargs):
            kwargs['extension'] = MyExt()
            Mapper.__init__(self, *args, **kwargs)

    # ScopedSession-provided mapper with the extension attached.
    from sqlalchemy.orm import scoping
    ss = scoping.ScopedSession(create_session)
    ss.extension = MyExt()
    ss_mapper = ss.mapper

    for mapperfunc in (mymapper, MyMapper, ss_mapper):
        # Per-class override via __mapper_cls__.
        base = decl.declarative_base()

        class Foo(base):
            __tablename__ = 'foo'
            __mapper_cls__ = mapperfunc
            id = Column(Integer, primary_key=True)

        eq_(Foo.__mapper__.compile().extension.create_instance(), 'CHECK')

        # Base-wide override via declarative_base(mapper=...).
        base = decl.declarative_base(mapper=mapperfunc)

        class Foo(base):
            __tablename__ = 'foo'
            id = Column(Integer, primary_key=True)

        eq_(Foo.__mapper__.compile().extension.create_instance(), 'CHECK')
def extended_declarative_base(db_session, forms=True, **kw):
    """Return a declarative base wired to *db_session* via a query property.

    The base mixes in FormBase when *forms* is true, otherwise ExtendedBase;
    extra keyword arguments are forwarded to declarative_base().
    """
    mixin = FormBase if forms else ExtendedBase
    Base = declarative_base(cls=mixin, **kw)
    Base.query = db_session.query_property()
    return Base
def MakeClasses(metadata):
    """Declare the Gbfile tables against *metadata* and return the classes."""
    Base = declarative_base(metadata=metadata)

    class Gbfile(Base):
        # A GenBank source file identified by its name.
        __tablename__ = 'gbfile'
        __table_args__ = (UniqueConstraint('name'), {})

        id = Column(Integer, primary_key=True)
        name = Column(String, index=True)

        def __init__(self, name):
            self.name = name

    sgjBase = declarative_base(metadata=metadata)

    class Gbfile_GBIDJoin(sgjBase):
        # Join table linking a GBID row to the file it came from.
        __tablename__ = 'gbfile_gbidjoin'
        __table_args__ = (UniqueConstraint('gbid'), {})

        id = Column(Integer, primary_key=True)
        gbfile = Column(Integer, ForeignKey('gbfile.id'))
        gbid = Column(Integer, ForeignKey('gbid.id'), index=True)

        def __init__(self, gbfile, gbid):
            self.gbid = gbid
            self.gbfile = gbfile

    return {'Gbfile': Gbfile, 'Gbfile_GBIDJoin': Gbfile_GBIDJoin}
def test_other_base_twice__different(self):
    """Two distinct declarative bases must yield distinct model sets."""
    contract = {"Group": {"table_name": "groups"}}
    first = self._callFUT(declarative_base(), contract)
    second = self._callFUT(declarative_base(), contract)
    # Neither the containers nor the generated classes may be shared.
    self.assertNotEqual(first, second)
    self.assertNotEqual(first.Group, second.Group)
def declarative_bases(metadata, registry=None):
    """Return all declarative bases bound to a single metadata object."""
    if registry is None:
        registry = {}

    def _base(cls):
        # Both bases share the metadata and the class registry.
        return declarative_base(cls=cls, metadata=metadata,
                                class_registry=registry)

    return _base(BaseOps), _base(Timestamped)
def test_create_drop_tables_several_meta(self):
    # Exercise create/drop table commands across several MetaData objects,
    # each bound to its own engine; 'm3' has no tables at all.
    Base1 = declarative_base()

    class A1(Base1):
        __tablename__ = 'A'
        id = Column(Integer, primary_key=True)

    Base2 = declarative_base()

    class A2(Base2):
        __tablename__ = 'A'
        id = Column(Integer, primary_key=True)

    engine1 = create_engine('sqlite://')
    engine2 = create_engine('sqlite://')
    # Route each table to its own in-memory engine.
    binds = {
        A1.__table__: engine1,
        A2.__table__: engine2,
    }
    meta = {
        'm1': Base1.metadata,
        'm2': Base2.metadata,
        'm3': MetaData(),  # empty metadata: creating it must touch nothing
    }
    cli = Sqla(orm.sessionmaker(binds=binds), metadata=meta)
    for verbose in [False, True]:
        # No argument: create tables for every registered metadata.
        cli.command_create_tables(verbose=verbose)
        self.assertTrue(engine1.has_table('A'))
        self.assertTrue(engine2.has_table('A'))
        # Dropping a single metadata only affects its engine
        # (drop prompts for confirmation on stdin, hence the mock).
        with mock.patch.object(sys.stdin, 'readline', return_value='y'):
            cli.command_drop_tables('m1')
        self.assertFalse(engine1.has_table('A'))
        self.assertTrue(engine2.has_table('A'))
        # No argument: drop everything.
        with mock.patch.object(sys.stdin, 'readline', return_value='y'):
            cli.command_drop_tables()
        self.assertFalse(engine1.has_table('A'))
        self.assertFalse(engine2.has_table('A'))
        # Creating a single metadata only affects its engine.
        cli.command_create_tables('m1', verbose=verbose)
        self.assertTrue(engine1.has_table('A'))
        self.assertFalse(engine2.has_table('A'))
        with mock.patch.object(sys.stdin, 'readline', return_value='y'):
            cli.command_drop_tables()
        self.assertFalse(engine1.has_table('A'))
        self.assertFalse(engine2.has_table('A'))
        # The empty metadata creates no tables anywhere.
        cli.command_create_tables('m3', verbose=verbose)
        self.assertFalse(engine1.has_table('A'))
        self.assertFalse(engine2.has_table('A'))
def dummy_mapper(table):
    """Return a throwaway declarative class mapped directly onto *table*."""
    base_cls = declarative_base()

    class DummyMapper(base_cls):
        __table__ = table

    return DummyMapper
def Init():
    """Initialize the module-level SQLAlchemy objects and reflect the schema."""
    global Base, metadata, engine, Session
    Base = declarative_base()
    metadata = MetaData()
    # Pool settings come from the `cp` config module.
    engine = create_engine(
        cp.Connect_string,
        pool_size=cp.dbpoolsize,
        max_overflow=0,
        pool_timeout=cp.dbpool_timeout,
        pool_recycle=cp.dbpool_recycle,
        echo=cp.dbecho,
    )
    Session = sessionmaker(bind=engine)
    # Load the existing table definitions from the live database.
    metadata.reflect(bind=engine)
def table_base(self):
    """Base class for all tables returned by this database.

    Built with :py:class:`declarative_base
    <sqlalchemy:sqlalchemy.ext.declarative.declarative_base>` and bound to
    this database's metadata object.
    """
    return declarative_base(metadata=self.metadata)
def __init__(self):
    """Open the sequences database and map the ORM classes to its tables."""
    # NOTE(review): the path is machine-specific; consider making it
    # configurable instead of hard-coded.
    self.engine = create_engine(
        'sqlite://///home/pi/baltic_rallye_code/common/sequences.db')
    self.base = declarative_base()
    self.meta = MetaData(bind=self.engine)
    self.session = sessionmaker(bind=self.engine)()

    # Reflect structure and datatypes of every table from the database.
    def _reflect(name):
        return Table(name, self.meta, autoload=True,
                     autoload_with=self.engine)

    self.sequences = _reflect('sequences')
    self.sensors = _reflect('sensors')
    self.frames = _reflect('frames')
    self.annot_cars = _reflect('annot_cars')
    self.annot_taillights = _reflect('annot_taillights')

    # Map the placeholder classes onto the reflected tables; they inherit
    # all fields and can be used to insert or query.
    orm.mapper(self.Sensor, self.sensors)
    orm.mapper(self.Sequence, self.sequences)
    orm.mapper(self.Frame, self.frames)
    orm.mapper(self.AnnotCar, self.annot_cars)
    orm.mapper(self.AnnotTaillight, self.annot_taillights)
def make_declarative_base(self):
    """Create and return the declarative base with a bound query property."""
    model_base = declarative_base(
        cls=Model,
        name='Model',
        mapper=signalling_mapper,
        metaclass=_BoundDeclarativeMeta,
    )
    model_base.query = _QueryProperty(self)
    return model_base
def define_tables(self, metadata):
    # Rebind the module-level User/Address so sibling code can see the
    # freshly declared classes.
    global User, Address
    Base = decl.declarative_base(metadata=metadata)

    class User(Base, ComparableEntity):
        __tablename__ = 'users'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    class Address(Base, ComparableEntity):
        __tablename__ = 'addresses'
        id = Column(Integer, primary_key=True)
        email = Column(String(50))
        user_id = Column(Integer, ForeignKey('users.id'))

        # `inline` and `stringbased` are free variables from the enclosing
        # scope; they select how the User<->Address relation is declared.
        if inline:
            if stringbased:
                # String-based primaryjoin, resolved at mapper compile time.
                user = relation("User",
                                primaryjoin="User.id==Address.user_id",
                                backref="addresses")
            else:
                user = relation(User,
                                primaryjoin=User.id == user_id,
                                backref="addresses")

    if not inline:
        # Attach the relation after the classes exist and mappers compiled.
        compile_mappers()
        if stringbased:
            Address.user = relation("User",
                                    primaryjoin="User.id==Address.user_id",
                                    backref="addresses")
        else:
            Address.user = relation(User,
                                    primaryjoin=User.id == Address.user_id,
                                    backref="addresses")
def __init__(self, config: DBConfig) -> None:
    """Create the engine, declarative base and a ready-to-use session."""
    self.engine = create_engine(config.URI)
    self.engine.echo = config.ECHO
    self.Model = declarative_base()
    self.Model.metadata.bind = self.engine
    session_factory = sessionmaker(bind=self.engine)
    self.__DBSession = session_factory
    self.session = session_factory()
def test_custom_base(self):
    """declarative_base(cls=...) must mix the supplied class into the base."""
    class MyBase(object):
        def foobar(self):
            return "foobar"

    Base = decl.declarative_base(cls=MyBase)
    # The produced base still carries declarative metadata...
    assert hasattr(Base, 'metadata')
    # ...and inherits behavior from the supplied class.
    assert Base().foobar() == "foobar"
def upgrade():
    """Delete EAS folder-sync rows for skipped and legacy folders, if the
    EAS table exists in the (module-level) reflected schema.

    BUG FIX: the original rebound the name ``Base`` inside this function,
    which made ``Base`` local throughout and turned the very first check
    into an UnboundLocalError.  The locally reflected base now has its own
    name so the module-level ``Base`` is read correctly.
    """
    if 'easfoldersyncstatus' in Base.metadata.tables:
        from inbox.ignition import main_engine
        engine = main_engine(pool_size=1, max_overflow=0)
        from inbox.models.session import session_scope
        from sqlalchemy.ext.declarative import declarative_base
        from sqlalchemy.orm.exc import NoResultFound

        # Reflect the live schema separately from the module-level Base.
        reflected_base = declarative_base()
        reflected_base.metadata.reflect(engine)

        from inbox.models.backends.eas import EASFolderSyncStatus
        from inbox.models import Folder
        from inbox.util.eas.constants import SKIP_FOLDERS

        with session_scope(versioned=False,
                           ignore_soft_deletes=False) as db_session:
            # Drop sync statuses (and their folders) for skipped folder types.
            statuses = db_session.query(EASFolderSyncStatus).filter(
                EASFolderSyncStatus.eas_folder_type.in_(SKIP_FOLDERS)).all()
            for s in statuses:
                db_session.delete(s)
                db_session.delete(s.folder)
            # Also remove statuses for the legacy 'RecipientInfo' folder.
            try:
                for status in db_session.query(EASFolderSyncStatus)\
                        .join(Folder).filter(
                            Folder.name == 'RecipientInfo').all():
                    db_session.delete(status)
                    db_session.delete(status.folder)
            except NoResultFound:
                pass

            db_session.commit()
def postprocess_moves():
    # Backfill GPS center, max distance from center, and reverse-geocoded
    # address for every move.  Coordinates are stored in radians (they are
    # converted with radian_to_degree before geodesic/geocoding calls).
    Base = declarative_base()
    Session = sessionmaker(bind=op.get_bind())

    # Lightweight ad-hoc models: the migration declares only the columns it
    # needs instead of importing application models.
    class Move(Base):
        __tablename__ = 'move'
        id = sa.Column(sa.Integer, name="id", primary_key=True)
        activity = sa.Column(sa.String, name='activity')
        location_address = sa.Column(sa.String, name="location_address")
        location_raw = sa.Column(sa.String, name="location_raw")
        gps_center_max_distance = sa.Column(sa.Float, name="gps_center_max_distance")
        gps_center_latitude = sa.Column(sa.Float, name="gps_center_latitude")
        gps_center_longitude = sa.Column(sa.Float, name="gps_center_longitude")

    class Sample(Base):
        __tablename__ = 'sample'
        id = sa.Column(sa.Integer, name="id", primary_key=True)
        moveId = sa.Column(sa.Integer, sa.ForeignKey(Move.id), name="move_id", nullable=False)
        move = sa.orm.relationship(Move, backref=sa.orm.backref('samples', lazy='dynamic'))
        sample_type = sa.Column(sa.String, name='sample_type')
        longitude = sa.Column(sa.Float, name='longitude')
        latitude = sa.Column(sa.Float, name='latitude')

    session = Session()
    moves_count = session.query(Move).count()
    for idx, move in enumerate(session.query(Move)):
        print(u"processing move %d/%d" % (idx + 1, moves_count))
        # Only GPS-derived samples ('gps-*' types) carry usable coordinates.
        gps_samples = [
            sample for sample in move.samples
            if sample.sample_type and sample.sample_type.startswith('gps-')
        ]
        if len(gps_samples) > 0:
            print(u" got %d GPS samples for move %d: %s"
                  % (len(gps_samples), move.id, move.activity))

            gps_center = calculate_gps_center(gps_samples)
            move.gps_center_latitude = gps_center[0]
            move.gps_center_longitude = gps_center[1]

            # Farthest sample from the center; vincenty expects degrees.
            gps_center_degrees = [radian_to_degree(x) for x in gps_center]
            gps_center_max_distance = 0
            for sample in gps_samples:
                point = (sample.latitude, sample.longitude)
                point_degrees = [radian_to_degree(x) for x in point]
                distance = vincenty(gps_center_degrees, point_degrees).meters
                gps_center_max_distance = max(gps_center_max_distance, distance)
            move.gps_center_max_distance = gps_center_max_distance

            # Reverse-geocode the first GPS sample for a display address.
            first_sample = gps_samples[0]
            latitude = first_sample.latitude
            longitude = first_sample.longitude
            geolocator = Nominatim()
            location = geolocator.reverse(
                "%f, %f" % (radian_to_degree(latitude),
                            radian_to_degree(longitude)))
            move.location_address = location.address
            move.location_raw = json.dumps(location.raw)
        else:
            print(u" got no GPS samples for move %d: %s" % (move.id, move.activity))
"""Module containing configurations for the Finnhub historical database destination"""
from sqlalchemy.ext.declarative import declarative_base

from judah.destinations.database.model import DatabaseBaseModel

# Declarative base shared by every Finnhub historical-DB model.
FinnhubHistoricalDBBase = declarative_base()


class FinnhubHistoricalDatabaseBaseModel(DatabaseBaseModel, FinnhubHistoricalDBBase):
    """Abstract parent class for Finnhub historical destination models."""

    __abstract__ = True
    _base_declarative_class = FinnhubHistoricalDBBase
from datetime import datetime, timedelta, timezone
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm.session import sessionmaker
from os.path import exists, isfile, join

# Base needed for sqlalchemy (two independent declarative bases).
BaseSession = declarative_base()
BaseSessionTwo = declarative_base()

# Reference epochs for timestamp conversion.
EPOCH = datetime(1970, 1, 1)        # Unix epoch
WEBKITEPOCH = datetime(1601, 1, 1)  # WebKit/Chrome epoch

# Timestamp-format discriminators used when decoding stored values.
OTHER = "other"
DT_SEC = "datetime_second"
DT_SEC_DOT_MILLI = "datetime_second_dot_milli"
DT_MILLI = "datetime_milli"
DT_MICRO = "datetime_microseconds"
DT_MILLI_ZEROED_MICRO = "datetime_milliseconds_zeroed_microseconds"
DT_MILLI_OR_ZERO = "datetime_milliseconds_zero"
DT_ZERO = "datetime_always_zero"
DT_WEBKIT = "datetime_webkit"
DT_STRING = "datetime_string"

# Unit-conversion factors (seconds <-> milli/microseconds).
MILLI_FACTOR = 1000
MICRO_FACTOR = 1000000

# German for "permanent" — presumably a persistence label; verify at call site.
PERSISTENT = "Dauerhaft"
import sqlalchemy as sa
import sqlalchemy.orm as orm
from sqlalchemy.orm import Session
import sqlalchemy.ext.declarative as dec

# Shared declarative base for all models in the package.
SqlAlchemyBase = dec.declarative_base()

# Session factory; created once by global_init().
__factory = None


def global_init(db_file):
    """Initialize the engine and session factory for the given SQLite file."""
    global __factory

    # Already initialized — nothing to do.
    if __factory:
        return

    if not db_file or not db_file.strip():
        raise Exception("Необходимо указать файл базы данных.")

    conn_str = f'sqlite:///{db_file.strip()}?check_same_thread=False'
    print(f"Подключение к базе данных по адресу {conn_str}")

    engine = sa.create_engine(conn_str, echo=False)
    __factory = orm.sessionmaker(bind=engine)

    # Import every model module so its tables register on the base
    # before create_all runs.
    from . import __all_models

    SqlAlchemyBase.metadata.create_all(engine)
# License for the specific language governing permissions and limitations # under the License. """ SQLAlchemy models for cinder data. """ from oslo_config import cfg from oslo_db.sqlalchemy import models from oslo_utils import timeutils from sqlalchemy import Column, Integer, String, Text, schema from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import ForeignKey, DateTime, Boolean from sqlalchemy.orm import relationship, backref, validates CONF = cfg.CONF BASE = declarative_base() class CinderBase(models.TimestampMixin, models.ModelBase): """Base class for Cinder Models.""" __table_args__ = {'mysql_engine': 'InnoDB'} # TODO(rpodolyaka): reuse models.SoftDeleteMixin in the next stage # of implementing of BP db-cleanup deleted_at = Column(DateTime) deleted = Column(Boolean, default=False) metadata = None def delete(self, session): """Delete this object."""
def get_stock_price_offline(tick):
    """Load historical prices for *tick* from the local database and build
    scaled train/test windows for the model.

    Returns a tuple ``(test_df, stock_default_list, min_max_scaler)`` where
    ``stock_default_list[tick]`` holds ``[X_train, y_train, X_test, y_test,
    test_df]``.
    """
    from sqlalchemy import create_engine
    # FIX: a SQLite URL needs three slashes before a relative path
    # ('sqlite:///...'); the previous 'sqlite://../...' form is invalid.
    engine = create_engine('sqlite:///../database/database.db', echo=True)

    stock_default_list = defaultdict(list)
    stock_default_list[tick] = []

    # Use the Flask-SQLAlchemy to query our data from database
    stock_data = StockPrice.find_all_by_query(comp=tick)

    date_, high, low, open_, adj_close, vol = [], [], [], [], [], []
    for row in stock_data:
        date_.append(dt.datetime.strptime(str(row.date), '%Y-%m-%d'))
        high.append(row.high)
        low.append(row.low)
        open_.append(row.open_)
        adj_close.append(row.adj_close)
        vol.append(row.vol)

    df = pd.DataFrame({
        'date': date_,
        'high': high,
        'low': low,
        'open': open_,
        'adj_close': adj_close,
        'vol': vol
    })
    df.set_index('date', inplace=True)

    # Split dataframe into train & test parts by date range.
    train_df = df['2012-01-01':'2016-12-31']
    test_df = df['2017-01-01':'2020-06-30']

    # Standardize the input before feeding the model.
    min_max_scaler = MinMaxScaler(feature_range=(0, 1))
    train_scaled = min_max_scaler.fit_transform(train_df.values)

    time_step = 180
    target_col_idx = 3  # 'adj_close' column in the scaled array

    # Training windows.
    X_train, y_train = to_model_input(time_step, train_scaled, target_col_idx)

    # Test windows need `time_step` rows of history before the test range.
    dataset_total = pd.concat([train_df, test_df], axis=0)
    testing_inputs = dataset_total[len(dataset_total) - len(test_df) - time_step:]
    testing_scaled = min_max_scaler.transform(testing_inputs)
    X_test, y_test = to_model_input(time_step, testing_scaled, target_col_idx)

    stock_default_list[tick].extend([X_train, y_train, X_test, y_test, test_df])
    return test_df, stock_default_list, min_max_scaler
from flask_login import UserMixin
from sqlalchemy import Column, Integer, String, ForeignKey
from dictalchemy import DictableModel
from sqlalchemy.ext.declarative import declarative_base

# All models are dict-convertible thanks to the DictableModel mixin.
Base = declarative_base(cls=DictableModel)


class User(Base, UserMixin):
    """Represents a single application user."""

    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    first_name = Column(String(50))
    last_name = Column(String(50))
    username = Column(String(15), unique=True)
    email = Column(String(50), unique=True)
    password = Column(String(80))

    def __repr__(self):
        return f'{self.first_name} {self.last_name}'


class UserSavedRecipes(Base):
    """Saves a recipe id and user id in order to store recipes for later viewing."""

    __tablename__ = 'user_saved_recipes'

    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))
    spoonacular_recipe_id = Column(Integer)
# coding: utf-8 from sqlalchemy import Column, Date, Index, String, TIMESTAMP, Text, text from sqlalchemy.dialects.mysql import BIGINT, INTEGER, TINYINT from sqlalchemy.ext.declarative import declarative_base BaseModel = declarative_base() metadata = BaseModel.metadata # 插画基本信息表 class Illustration(BaseModel): __tablename__ = 'illustration' id = Column(BIGINT(20), primary_key=True) user_id = Column(BIGINT(20), nullable=False, index=True) title = Column(String(255), nullable=False, server_default=text("''")) type = Column(String(50), nullable=False, server_default=text("''")) caption = Column(Text) restrict = Column(INTEGER(11), nullable=False, server_default=text("'0'")) create_date = Column(String(40), nullable=False, server_default=text("''")) page_count = Column(INTEGER(11), nullable=False, server_default=text("'0'")) width = Column(INTEGER(11), nullable=False, server_default=text("'0'")) height = Column(INTEGER(11), nullable=False, server_default=text("'0'")) sanity_level = Column(INTEGER(11), nullable=False, server_default=text("'0'")) x_restrict = Column(INTEGER(11), nullable=False, server_default=text("'0'"))
import sqlalchemy as db
from sqlalchemy.ext.declarative import declarative_base

BaseAlchemy = declarative_base()


class PessoaModel(BaseAlchemy):
    """ORM model for a person ('pessoa') record."""

    __tablename__ = "pessoa"

    id = db.Column(db.Integer, primary_key=True)
    nome = db.Column(db.String(length=100))
    sobrenome = db.Column(db.String(length=200))
    idade = db.Column(db.Integer)

    def __str__(self):
        # "<id>|<first> <last> - <age>"
        return "{}|{} {} - {}".format(self.id, self.nome, self.sobrenome, self.idade)
from passlib.handlers.cisco import cisco_type7 from passlib.hash import pbkdf2_sha256 from sqlalchemy import Column, Date, DateTime, Enum, ForeignKey, Integer, Unicode, \ UnicodeText, Table, type_coerce, case, Boolean from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.hybrid import hybrid_method, hybrid_property from sqlalchemy.orm import relationship from sqlalchemy_i18n import make_translatable import messages from db.json_model import JSONModel import settings mylogger = logging.getLogger(__name__) SQLAlchemyBase = declarative_base() make_translatable(options={"locales": settings.get_accepted_languages()}) def _generate_media_url(class_instance, class_attibute_name, default_image=False): class_base_url = urljoin( urljoin( urljoin("http://{}".format(settings.STATIC_HOSTNAME), settings.STATIC_URL), settings.MEDIA_PREFIX), class_instance.__tablename__ + "/") class_attribute = getattr(class_instance, class_attibute_name) if class_attribute is not None: return urljoin( urljoin(
def setUp(self):
    """Build an in-memory SQLite engine with fresh metadata and sessionmaker."""
    self.metadata = MetaData()
    self.DeclarativeBase = declarative_base(metadata=self.metadata)
    self.engine = create_engine('sqlite:///:memory:', echo=True)
    self.Session = sessionmaker(bind=self.engine)
upon access of a non-existent key, in the same manner as Python's "collections.defaultdict" object. """ from sqlalchemy import String, Integer, Column, create_engine, ForeignKey from sqlalchemy.orm import relationship, Session from sqlalchemy.orm.collections import MappedCollection from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.associationproxy import association_proxy import operator class Base(object): id = Column(Integer, primary_key=True) Base = declarative_base(cls=Base) class GenDefaultCollection(MappedCollection): def __missing__(self, key): self[key] = b = B(key) return b class A(Base): __tablename__ = "a" associations = relationship("B", collection_class=lambda: GenDefaultCollection(operator.attrgetter("key")) ) collections = association_proxy("associations", "values") """Bridge the association from 'associations' over to the 'values' association proxy of B.
class UserCreate(models.BaseUserCreate):
    """Payload schema for creating a user."""
    pass


class UserUpdate(User, models.BaseUserUpdate):
    """Payload schema for updating a user."""
    pass


class UserDB(User, models.BaseUserDB):
    """User record as stored in the database."""
    pass


database = databases.Database(DATABASE_URL)
Base: DeclarativeMeta = declarative_base()


class UserTable(Base, SQLAlchemyBaseUserTable):
    """SQLAlchemy table backing the users collection."""
    pass


engine = sqlalchemy.create_engine(
    DATABASE_URL, connect_args={"check_same_thread": False}
)
Base.metadata.create_all(engine)

users = UserTable.__table__
user_db = SQLAlchemyUserDatabase(UserDB, database, users)
# # ----------------------------------------------------------------------------- from config import DB_URI from sqlalchemy import create_engine from sqlalchemy import (Column, Integer, String, Boolean, DateTime, text, ForeignKey) from sqlalchemy.pool import QueuePool from sqlalchemy.orm import sessionmaker, relationship, backref from sqlalchemy.ext.declarative import declarative_base engine = create_engine(DB_URI, echo=False) Session = sessionmaker(bind=engine) Table = declarative_base() Table.metadata.bind = engine class User(Table): __tablename__ = 'user' id = Column(Integer, primary_key=True) name = Column(String(100), nullable=True) password = Column(String(255), nullable=False) active = Column(Boolean, index=True, nullable=True, server_default=text("true")) role = Column(String(30),nullable=False)
from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session, Session
from sqlalchemy.ext.declarative import declarative_base

from loguru import logger

from ..core.settings import Settings

# TODO add TP inside PP
# TODO add message broadcasting inside PP

Base = declarative_base(name=Settings().sqlite_dsn.split("/")[-1])
engine = create_engine(Settings().sqlite_dsn, echo=False)
global_session = sessionmaker(bind=engine)
scoped_Session = scoped_session(
    sessionmaker(bind=engine, expire_on_commit=False, autoflush=False))


@contextmanager
def session_scope() -> Session:
    """Provide a transactional scope: commit on success, rollback on error."""
    session = scoped_Session()
    try:
        yield session
        session.commit()
    except Exception as e:
        session.rollback()
        logger.error(e)
        # FIX: bare `raise` re-raises with the original traceback intact
        # (the previous `raise e` appended an extra frame).
        raise
    finally:
        session.close()
assigned any primary key information to the object before calling. """ self._commit(session, batch, merge=True) def __repr__(self): retval = "%s:\n" % self.__class__ for k, v in self.__dict__.items(): if k == "_sa_instance_state": continue retval += " * {} : {}\n".format(k, v) return retval metadata = MetaData() Base = declarative_base(cls=SABase, metadata=metadata) # ---------------------------------------------------------------- # Method to verify if tables exist or are according to the schema # ---------------------------------------------------------------- def get_missing_tables(db): tables = ( DBVersion, # WORKFLOW Workflow, Workflowstate, WorkflowMeta, WorkflowFiles,
class SQLTrackerStore(TrackerStore):
    """Store which can save and retrieve trackers from an SQL database."""

    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class SQLEvent(Base):
        """Represents an event in the SQL Tracker Store"""

        from sqlalchemy import Column, Integer, String, Float, Text

        __tablename__ = "events"

        # One row per tracker event; `data` holds the full event as JSON.
        id = Column(Integer, primary_key=True)
        sender_id = Column(String(255), nullable=False, index=True)
        type_name = Column(String(255), nullable=False)
        timestamp = Column(Float)
        intent_name = Column(String(255))
        action_name = Column(String(255))
        data = Column(Text)

    def __init__(
        self,
        domain: Optional[Domain] = None,
        dialect: Text = "sqlite",
        host: Optional[Text] = None,
        port: Optional[int] = None,
        db: Text = "rasa.db",
        username: Text = None,
        password: Text = None,
        event_broker: Optional[EventChannel] = None,
        login_db: Optional[Text] = None,
        query: Optional[Dict] = None,
    ) -> None:
        """Connect to the SQL database, retrying every 5 seconds until it is
        reachable, and ensure the events table exists."""
        from sqlalchemy.orm import sessionmaker
        from sqlalchemy import create_engine
        import sqlalchemy.exc

        engine_url = self.get_db_url(dialect, host, port, db, username,
                                     password, login_db, query)

        logger.debug("Attempting to connect to database via '{}'.".format(
            repr(engine_url)))

        # Database might take a while to come up
        while True:
            try:
                # pool_size and max_overflow can be set to control the number of
                # connections that are kept in the connection pool. Not available
                # for SQLite, and only tested for postgresql. See
                # https://docs.sqlalchemy.org/en/13/core/pooling.html#sqlalchemy.pool.QueuePool
                if dialect == "postgresql":
                    self.engine = create_engine(
                        engine_url,
                        pool_size=int(os.environ.get("SQL_POOL_SIZE", "50")),
                        max_overflow=int(
                            os.environ.get("SQL_MAX_OVERFLOW", "100")),
                    )
                else:
                    self.engine = create_engine(engine_url)

                # if `login_db` has been provided, use current channel with
                # that database to create working database `db`
                if login_db:
                    self._create_database_and_update_engine(db, engine_url)

                try:
                    self.Base.metadata.create_all(self.engine)
                except (
                    sqlalchemy.exc.OperationalError,
                    sqlalchemy.exc.ProgrammingError,
                ) as e:
                    # Several Rasa services started in parallel may attempt to
                    # create tables at the same time. That is okay so long as
                    # the first services finishes the table creation.
                    logger.error("Could not create tables: {}".format(e))

                self.sessionmaker = sessionmaker(bind=self.engine)
                break
            except (
                sqlalchemy.exc.OperationalError,
                sqlalchemy.exc.IntegrityError,
            ) as e:
                # Connection failed — log, wait, and retry indefinitely.
                logger.warning(e)
                sleep(5)

        logger.debug("Connection to SQL database '{}' successful.".format(db))

        super(SQLTrackerStore, self).__init__(domain, event_broker)

    @staticmethod
    def get_db_url(
        dialect: Text = "sqlite",
        host: Optional[Text] = None,
        port: Optional[int] = None,
        db: Text = "rasa.db",
        username: Text = None,
        password: Text = None,
        login_db: Optional[Text] = None,
        query: Optional[Dict] = None,
    ) -> Union[Text, "URL"]:
        """Builds an SQLAlchemy `URL` object representing the parameters needed
        to connect to an SQL database.

        Args:
            dialect: SQL database type.
            host: Database network host.
            port: Database network port.
            db: Database name.
            username: User name to use when connecting to the database.
            password: Password for database user.
            login_db: Alternative database name to which initially connect,
                and create the database specified by `db` (PostgreSQL only).
            query: Dictionary of options to be passed to the dialect and/or
                the DBAPI upon connect.

        Returns:
            URL ready to be used with an SQLAlchemy `Engine` object.
        """
        from urllib.parse import urlsplit
        from sqlalchemy.engine.url import URL

        # Users might specify a url in the host
        parsed = urlsplit(host or "")
        if parsed.scheme:
            return host

        if host:
            # add fake scheme to properly parse components
            parsed = urlsplit("schema://" + host)

            # users might include the port in the url
            port = parsed.port or port
            host = parsed.hostname or host

        return URL(
            dialect,
            username,
            password,
            host,
            port,
            database=login_db if login_db else db,
            query=query,
        )

    def _create_database_and_update_engine(self, db: Text, engine_url: "URL"):
        """Create database `db` and update engine to reflect the updated
        `engine_url`."""
        from sqlalchemy import create_engine

        self._create_database(self.engine, db)
        engine_url.database = db
        self.engine = create_engine(engine_url)

    @staticmethod
    def _create_database(engine: "Engine", db: Text):
        """Create database `db` on `engine` if it does not exist
        (PostgreSQL-specific: uses psycopg2 and pg_catalog)."""
        import psycopg2

        conn = engine.connect()

        cursor = conn.connection.cursor()
        # End the implicit transaction; CREATE DATABASE cannot run inside one.
        cursor.execute("COMMIT")
        cursor.execute(
            ("SELECT 1 FROM pg_catalog.pg_database WHERE datname = '{}'".
             format(db)))
        exists = cursor.fetchone()
        if not exists:
            try:
                cursor.execute("CREATE DATABASE {}".format(db))
            except psycopg2.IntegrityError as e:
                logger.error("Could not create database '{}': {}".format(
                    db, e))

        cursor.close()
        conn.close()

    @contextlib.contextmanager
    def session_scope(self):
        """Provide a transactional scope around a series of operations."""
        session = self.sessionmaker()
        try:
            yield session
        finally:
            session.close()

    def keys(self) -> Iterable[Text]:
        """Returns sender_ids of the SQLTrackerStore"""
        with self.session_scope() as session:
            sender_ids = session.query(
                self.SQLEvent.sender_id).distinct().all()
            return [sender_id for (sender_id, ) in sender_ids]

    def retrieve(self, sender_id: Text) -> Optional[DialogueStateTracker]:
        """Create a tracker from all previously stored events."""
        with self.session_scope() as session:
            query = session.query(self.SQLEvent)
            result = (query.filter_by(sender_id=sender_id).order_by(
                self.SQLEvent.timestamp).all())

            events = [json.loads(event.data) for event in result]

            if self.domain and len(events) > 0:
                logger.debug(
                    "Recreating tracker from sender id '{}'".format(sender_id))
                return DialogueStateTracker.from_dict(sender_id, events,
                                                      self.domain.slots)
            else:
                logger.debug("Can't retrieve tracker matching "
                             "sender id '{}' from SQL storage. "
                             "Returning `None` instead.".format(sender_id))
                return None

    def save(self, tracker: DialogueStateTracker) -> None:
        """Update database with events from the current conversation."""
        if self.event_broker:
            self.stream_events(tracker)

        with self.session_scope() as session:
            # only store recent events
            events = self._additional_events(session, tracker)

            for event in events:
                data = event.as_dict()

                intent = data.get("parse_data", {}).get("intent", {}).get("name")
                action = data.get("name")
                timestamp = data.get("timestamp")

                # noinspection PyArgumentList
                session.add(
                    self.SQLEvent(
                        sender_id=tracker.sender_id,
                        type_name=event.type_name,
                        timestamp=timestamp,
                        intent_name=intent,
                        action_name=action,
                        data=json.dumps(data),
                    ))
            session.commit()

        logger.debug("Tracker with sender_id '{}' "
                     "stored to database".format(tracker.sender_id))

    def _additional_events(self, session: "Session",
                           tracker: DialogueStateTracker) -> Iterator:
        """Return events from the tracker which aren't currently stored."""
        n_events = (session.query(self.SQLEvent.sender_id).filter_by(
            sender_id=tracker.sender_id).count() or 0)

        return itertools.islice(tracker.events, n_events, len(tracker.events))
def setup_method(self, _):
    """Populate the test database with roles, users, layers, themes,
    restriction areas and full-text-search entries used by the tests."""
    # Always see the diff
    # https://docs.python.org/2/library/unittest.html#unittest.TestCase.maxDiff
    self.maxDiff = None  # pylint: disable=invalid-name
    self._tables = []

    # Imported lazily so the DB models are only loaded once a test runs.
    from geoalchemy2 import Geometry
    from sqlalchemy import Column, Table, func, types
    from sqlalchemy.ext.declarative import declarative_base

    from c2cgeoportal_commons.models import DBSession
    from c2cgeoportal_commons.models.main import (
        OGCSERVER_AUTH_GEOSERVER,
        OGCSERVER_TYPE_GEOSERVER,
        FullTextSearch,
        Functionality,
        Interface,
        LayerGroup,
        LayerWMS,
        OGCServer,
        RestrictionArea,
        Role,
        Theme,
    )
    from c2cgeoportal_commons.models.static import User

    setup_db()

    # Two roles/users: role2 additionally carries a restriction extent.
    role1 = Role(name="__test_role1")
    user1 = User(username="******", password="******", settings_role=role1, roles=[role1])
    user1.email = "*****@*****.**"
    role2 = Role(name="__test_role2",
                 extent=WKTElement("POLYGON((1 2, 1 4, 3 4, 3 2, 1 2))", srid=21781))
    user2 = User(username="******", password="******", settings_role=role2, roles=[role2])

    main = Interface(name="desktop")
    mobile = Interface(name="mobile")

    # Create a throwaway geo table so layer editing has a real target.
    engine = DBSession.c2c_rw_bind
    engine.connect()
    a_geo_table = Table(
        "a_geo_table",
        declarative_base(bind=engine).metadata,
        Column("id", types.Integer, primary_key=True),
        Column("geom", Geometry("POINT", srid=21781)),
        schema="geodata",
    )
    self._tables = [a_geo_table]
    # Drop first (checkfirst) so a failed previous run cannot break setup.
    a_geo_table.drop(checkfirst=True)
    a_geo_table.create()

    ogcserver = create_default_ogcserver()

    # Private editable layer backed by the geo table created above.
    private_layer_edit = LayerWMS(name="__test_private_layer_edit", public=False)
    private_layer_edit.layer = "__test_private_layer"
    private_layer_edit.geo_table = "a_schema.a_geo_table"
    private_layer_edit.interfaces = [main, mobile]
    private_layer_edit.ogc_server = ogcserver

    public_layer2 = LayerWMS(name="__test_public_layer",
                             layer="__test_public_layer_bis", public=True)
    public_layer2.interfaces = [main, mobile]
    public_layer2.ogc_server = ogcserver

    private_layer = LayerWMS(name="__test_private_layer",
                             layer="__test_private_layer_bis", public=False)
    private_layer.interfaces = [main, mobile]
    private_layer.ogc_server = ogcserver

    # Layer attached to an interface that is absent from the mapfile.
    interface_not_in_mapfile = Interface(name="interface_not_in_mapfile")
    public_layer_not_in_mapfile = LayerWMS(
        name="__test_public_layer_not_in_mapfile",
        layer="__test_public_layer_not_in_mapfile", public=True)
    public_layer_not_in_mapfile.interfaces = [interface_not_in_mapfile]
    public_layer_not_in_mapfile.ogc_server = ogcserver

    # OGC server whose URL points at a non-existent mapfile (error path).
    interface_notmapfile = Interface(name="interface_notmapfile")
    ogcserver_notmapfile = OGCServer(name="__test_ogc_server_notmapfile")
    ogcserver_notmapfile.url = mapserv_url + "?map=not_a_mapfile"
    public_layer_not_mapfile = LayerWMS(
        name="__test_public_layer_notmapfile",
        layer="__test_public_layer_notmapfile", public=True)
    public_layer_not_mapfile.interfaces = [interface_notmapfile]
    public_layer_not_mapfile.ogc_server = ogcserver_notmapfile

    # GeoServer-flavoured OGC server (different type/auth handling).
    interface_geoserver = Interface(name="interface_geoserver")
    ogcserver_geoserver = OGCServer(name="__test_ogc_server_geoserver")
    ogcserver_geoserver.url = mapserv_url
    ogcserver_geoserver.type = OGCSERVER_TYPE_GEOSERVER
    ogcserver_geoserver.auth = OGCSERVER_AUTH_GEOSERVER
    public_layer_geoserver = LayerWMS(name="__test_public_layer_geoserver",
                                      layer="testpoint_unprotected", public=True)
    public_layer_geoserver.interfaces = [interface_geoserver]
    public_layer_geoserver.ogc_server = ogcserver_geoserver

    # Layer with no WMS layer name set at all.
    interface_no_layers = Interface(name="interface_no_layers")
    public_layer_no_layers = LayerWMS(name="__test_public_layer_no_layers", public=True)
    public_layer_no_layers.interfaces = [interface_no_layers]
    public_layer_no_layers.ogc_server = ogcserver

    # Group all layers under a single theme.
    group = LayerGroup(name="__test_layer_group")
    group.children = [
        private_layer_edit,
        public_layer2,
        public_layer_not_in_mapfile,
        public_layer_not_mapfile,
        public_layer_geoserver,
        public_layer_no_layers,
        private_layer,
    ]
    theme = Theme(name="__test_theme")
    theme.children = [group]
    theme.interfaces = [
        main,
        interface_not_in_mapfile,
        interface_notmapfile,
        interface_geoserver,
        interface_no_layers,
    ]

    # Two functionalities sharing a name but with different values.
    functionality1 = Functionality(name="test_name", value="test_value_1")
    functionality2 = Functionality(name="test_name", value="test_value_2")
    theme.functionalities = [functionality1, functionality2]

    poly = "POLYGON((-100 0, -100 20, 100 20, 100 0, -100 0))"

    # role1: read-only restriction area on the private layers.
    area = WKTElement(poly, srid=21781)
    RestrictionArea(
        name="__test_ra1",
        description="",
        layers=[private_layer_edit, private_layer],
        roles=[role1],
        area=area,
    )

    # role2: same area but with write access.
    area = WKTElement(poly, srid=21781)
    RestrictionArea(
        name="__test_ra2",
        description="",
        layers=[private_layer_edit, private_layer],
        roles=[role2],
        area=area,
        readwrite=True,
    )

    # Three full-text-search rows: two identical, one without a layer name.
    entry1 = FullTextSearch()
    entry1.label = "label1"
    entry1.layer_name = "layer1"
    entry1.ts = func.to_tsvector("french", "soleil travail")
    entry1.the_geom = WKTElement("POINT(-90 -45)", 21781)
    entry1.public = True

    entry2 = FullTextSearch()
    entry2.label = "label1"
    entry2.layer_name = "layer1"
    entry2.ts = func.to_tsvector("french", "soleil travail")
    entry2.the_geom = WKTElement("POINT(-90 -45)", 21781)
    entry2.public = True

    entry3 = FullTextSearch()
    entry3.label = "label1"
    entry3.layer_name = None
    entry3.ts = func.to_tsvector("french", "soleil travail")
    entry3.the_geom = WKTElement("POINT(-90 -45)", 21781)
    entry3.public = True

    DBSession.add_all([user1, user2, theme, entry1, entry2, entry3])
    DBSession.flush()
    # Remember role1's generated id before the transaction is committed.
    self.role1_id = role1.id
    transaction.commit()
""" __table_args__ = { # Don't use MyISAM in MySQL. It doesn't support ON DELETE CASCADE. 'mysql_engine': 'InnoDB', # Don't use BlitzDB in Drizzle. It doesn't support foreign keys. 'drizzle_engine': 'InnoDB', # Collate to UTF-8. 'mysql_charset': 'utf8', } BaseDTO = declarative_base(cls=BaseDTO) #------------------------------------------------------------------------------ # TODO: if using mssql, check it's at least SQL Server 2005 # (LIMIT and OFFSET support is required). # TODO: if using mysql, check it's at least MySQL 5.0.3 # (nested transactions are required). # TODO: maybe in mysql check the tables are not myisam? # TODO: maybe create the database if it doesn't exist? # TODO: maybe add a method to compact the database? # http://stackoverflow.com/questions/1875885 # http://www.sqlite.org/lang_vacuum.html # http://dev.mysql.com/doc/refman/5.1/en/optimize-table.html # http://msdn.microsoft.com/en-us/library/ms174459(v=sql.90).aspx
from typing import Any

from domain.constants import TaskStatusEnum
from sqlalchemy import Column, Date, Enum, Integer, String, Time
from sqlalchemy.ext.declarative import declarative_base

# Declarative base shared by the ORM models in this module.
Base: Any = declarative_base()


class TaskORM(Base):
    """ORM mapping for a task row: name, status and scheduled start."""

    __tablename__ = "tasks"

    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)
    status = Column(Enum(TaskStatusEnum), nullable=False)
    date_start = Column(Date, nullable=False)
    time_start = Column(Time, nullable=False)

    def __repr__(self):
        # `!r` renders each field via repr(), matching the previous
        # explicit __repr__() calls.
        return (
            f"<Task(id={self.id}, name={self.name!r}, "
            f"status={self.status!r}, "
            f"date_start={self.date_start!r}, "
            f"time_start={self.time_start!r})>"
        )
import sqlalchemy
from sqlalchemy import Column, Integer, String, ForeignKey, ForeignKeyConstraint, Boolean
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, Session, sessionmaker

# Names re-exported for the db cogs.
__all__ = [
    'engine', 'DatabaseObject', 'Session', 'Column', 'Integer', 'String',
    'ForeignKey', 'relationship', 'Boolean'
]

engine = sqlalchemy.create_engine('sqlite:///dozer.db')
DatabaseObject = declarative_base(bind=engine, name='DatabaseObject')
# allow use of the reload command with db cogs
DatabaseObject.__table_args__ = {'extend_existing': True}


class CtxSession(Session):
    """A SQLAlchemy session usable as a sync or async context manager.

    On clean exit the session is committed; if an exception escapes the
    block it is rolled back and re-raised (the exit handlers return False).
    """

    def __enter__(self):
        return self

    async def __aenter__(self):
        return self

    def __exit__(self, err_type, err, tb):
        if err_type is None:
            self.commit()
        else:
            self.rollback()
        return False

    async def __aexit__(self, err_type, err, tb):
        # Bug fix: __aenter__ existed without __aexit__, so any
        # `async with CtxSession(...)` raised AttributeError on exit.
        # Mirror the synchronous commit/rollback behaviour.
        if err_type is None:
            self.commit()
        else:
            self.rollback()
        return False
import logging # MODELOS ################################################################################ from ..model import engine as predictive_engine from ..model import analysis_session from ..tools.property import propertycached from ...tools.dates import Timedelta from ...analysis.basics.context import Context from sqlalchemy import Column, Integer, Float, DateTime from sqlalchemy.ext.declarative import declarative_base from predictive.systems.statistical.tools.dates import Datetime Base = declarative_base(predictive_engine) class _BasalInsulin24hDoses(Base): """""" __tablename__ = 'analysis_basics_basal_doses_24h' """""" pk = Column(Integer, primary_key=True) # lint:ok user_pk = Column(Integer, nullable=False) datetime = Column(DateTime, nullable=False, index=True) # recommended doses absorved in 24 hours doses_absorved_24h = Column(Float, nullable=False) def __str__(self): st = "BasalInsulinNeeds: user_pk: {}, datetime: {}, doses_absorved_24h: {}".format(
import os

from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy import Index, UniqueConstraint
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

# NOTE(review): credentials are hard-coded in the URL — consider moving them
# to configuration/environment.
ENGINE = create_engine("mysql+pymysql://root:[email protected]:3306/sz_finance?charset=utf8mb4", )
# Base is the instance returned by declarative_base()
Base = declarative_base()
DBSession = sessionmaker(bind=ENGINE)
session = DBSession()


# Every model class must inherit from Base
class CashFlowStatement(Base):
    # __tablename__ is required: it is the actual table name in the database,
    # derived here from this source file's own name.
    __tablename__ = os.path.split(__file__)[-1].split(".")[0]

    # Common stock fields
    # primary_key = primary key, index = indexed, nullable = may be NULL
    Id = Column(Integer, index=True, primary_key=True, autoincrement=True)
    # Stock code (numeric form)
    Code = Column(Integer, index=True, comment='股票代码-数字型', unique=False)
    # Stock code (string form)
    Code_Str = Column(String(32), index=True, comment='股票代码-字符串型', unique=False)
    # Reporting period X
def Base(request, metadata):
    """Return a fresh declarative base bound to *metadata*.

    ``request`` is accepted (fixture-style signature) but not used.
    """
    base = declarative_base(metadata=metadata)
    return base
#!/usr/local/bin/env python3 # -*- coding:utf-8 -*- # __author__:"Howard" from sqlalchemy import Table, Column, Integer, String, ForeignKey from sqlalchemy.orm import relationship from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import UniqueConstraint from sqlalchemy import create_engine from conf.settings import DB_URI """ 初始化数据库 """ engine = create_engine(DB_URI, encoding='utf-8') Base = declarative_base(engine) class_m2m_teacher = Table( 'class_m2m_teacher', Base.metadata, Column('class_id', Integer, ForeignKey('class.id')), Column('teacher_id', Integer, ForeignKey('teacher.id'))) class_m2m_student = Table('class_m2m_student', Base.metadata, Column('class_id', Integer, ForeignKey('class.id')), Column('stu_id', Integer, ForeignKey('student.id'))) class Student(Base): __tablename__ = 'student' id = Column(Integer, primary_key=True) stu_name = Column(String(32), nullable=None, unique=True) qq = Column(Integer)
    Text, MetaData, CheckConstraint, text, UniqueConstraint
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref, scoped_session, sessionmaker

# Naming convention so indexes/constraints get deterministic names
# (presumably needed for migrations — confirm against the project's tooling).
convention = {
    "ix": 'ix_%(column_0_label)s',
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(column_0_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
}

metadata = MetaData(naming_convention=convention)
Base = declarative_base(metadata=metadata)  # type: Any


def make_session_maker(home: str) -> scoped_session:
    """Return a thread-local session factory for the 'svs.sqlite' DB in *home*."""
    db_path = os.path.join(home, 'svs.sqlite')
    engine = create_engine('sqlite:///{}'.format(db_path))
    maker = sessionmaker(bind=engine)
    return scoped_session(maker)


class Source(Base):
    __tablename__ = 'sources'
    id = Column(Integer, primary_key=True)
    uuid = Column(String(36), unique=True, nullable=False)
# Class for NOT automatically determining table names class NoNameMeta(BindMetaMixin, DeclarativeMeta): pass meta = MetaData( naming_convention={ "ix": "ix_%(column_0_label)s", "uq": "uq_%(table_name)s_%(column_0_name)s", "ck": "ck_%(table_name)s_%(constraint_name)s", "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", "pk": "pk_%(table_name)s" }) db = SQLAlchemy(model_class=declarative_base( cls=Model, metaclass=NoNameMeta, name='Model', metadata=meta)) token_length = 12 # Initializing login-manager and the user loader function required for login-manager login_manager = LoginManager() @login_manager.user_loader def load_grader(grader_id): return Grader.query.get(grader_id) # db.Models #
# Digital currencies
import pandas as pd
from sqlalchemy.ext.declarative import declarative_base

from zvt.contract import EntityMixin
from zvt.contract.register import register_entity, register_schema

CoinMetaBase = declarative_base()


@register_entity(entity_type='coin')
class Coin(CoinMetaBase, EntityMixin):
    """Coin entity: trades every calendar day, around the clock."""

    __tablename__ = 'coin'

    @classmethod
    def get_trading_dates(cls, start_date=None, end_date=None):
        # Crypto markets never close: every calendar day is a trading day.
        return pd.date_range(start_date, end_date, freq='D')

    @classmethod
    def could_short(cls):
        # Short selling is available for coins.
        return True

    @classmethod
    def get_trading_t(cls):
        # T+0 settlement.
        return 0

    @classmethod
    def get_trading_intervals(cls):
        # Bug fix: the original returned [('00:00,23:59')] — parentheses
        # around a single string are NOT a tuple, so callers iterating
        # (open, close) pairs got a bare string. Return a real 2-tuple.
        return [('00:00', '23:59')]
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.schema import MetaData

# Deterministic names for indexes and constraints, keyed by constraint kind.
NAMING_CONVENTION = dict(
    ix='ix_%(column_0_label)s',
    uq="uq_%(table_name)s_%(column_0_name)s",
    ck="ck_%(table_name)s_%(constraint_name)s",
    fk="fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    pk="pk_%(table_name)s",
)

# Shared metadata and declarative base for all models in this package.
metadata = MetaData(naming_convention=NAMING_CONVENTION)
Base = declarative_base(metadata=metadata)

__version__ = '0.1.0'
from pyramid.util import DottedNameResolver
from pyramid_beaker import session_factory_from_settings
from six import binary_type
from six import string_types
from sqlalchemy import MetaData
from sqlalchemy import engine_from_config
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker
from zope.sqlalchemy import ZopeTransactionExtension

from kotti.sqla import Base as KottiBase

metadata = MetaData()
# Thread-local session, hooked into the Zope transaction machinery.
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
# Declarative base built on Kotti's base class, sharing the metadata above
# and exposing Model.query via the scoped session.
Base = declarative_base(cls=KottiBase)
Base.metadata = metadata
Base.query = DBSession.query_property()

# Strings accepted as boolean values in configuration settings.
TRUE_VALUES = ("1", "y", "yes", "t", "true")
FALSE_VALUES = ("0", "n", "no", "f", "false", "none")


def authtkt_factory(**settings):
    # Builds the kwargs for an auth-tkt authentication policy from settings.
    # NOTE(review): body is truncated in this view — presumably returns an
    # AuthTktAuthenticationPolicy; confirm in the full source.
    from kotti.security import list_groups_callback

    kwargs = dict(
        secret=settings["kotti.secret2"],
        hashalg="sha512",
        callback=list_groups_callback,
    )
    try: