def test_all_htmlpopups(self):
    # Smoke-test the htmlPopup endpoint for every queryable prod layer in
    # every supported language (expects HTTP 200 for each combination).
    from chsdi.models import models_from_name
    from chsdi.models.bod import LayersConfig
    from sqlalchemy import distinct
    from sqlalchemy.orm import scoped_session, sessionmaker
    val = True
    DBSession = scoped_session(sessionmaker())
    query = DBSession.query(distinct(LayersConfig.idBod)).filter(LayersConfig.queryable == val).filter(LayersConfig.staging == 'prod')
    # Get a list of all the queryable layers
    layers = [q[0] for q in query]
    DBSession.close()
    # Get a list of feature ids
    features = []
    for layer in layers:
        try:
            model = models_from_name(layer)[0]
            # fresh short-lived session per layer to sample one feature id
            DBSession = scoped_session(sessionmaker())
            query = DBSession.query(model.primary_key_column()).limit(1)
            ID = [q[0] for q in query]
            # If tables are empty ID is an empty list
            if ID:
                features.append((layer, str(ID[0])))
            DBSession.close()
        except Exception as e:
            # Python 2 print statement; failures are only logged and the
            # layer is skipped instead of failing the whole test
            print e
        finally:
            DBSession.close()
    for f in features:
        for lang in ('de', 'fr', 'it', 'rm', 'en'):
            # status=200 makes webtest assert the response code for us
            self.testapp.get('/rest/services/all/MapServer/%s/%s/htmlPopup' % (f[0], f[1]), params={'callback': 'cb', 'lang': '%s' % lang}, status=200)
def connect(dbstr):
    """Create an engine/session pair for the opentuner database at *dbstr*.

    For an already-initialized database, verifies that a ``_meta`` version
    table exists and matches ``DB_VERSION`` (raising otherwise); then creates
    any missing tables and stamps the current version.

    Returns a ``(engine, Session)`` tuple.
    """
    engine = create_engine(dbstr, echo=False)
    connection = engine.connect()
    try:
        # handle case that the db was initialized before a version table existed yet
        if engine.dialect.has_table(connection, "program"):
            # there are existing tables
            if not engine.dialect.has_table(connection, "_meta"):
                # no version table: assume an outdated db version and error out
                raise Exception("Your opentuner database is currently out of date. Save a back up and reinitialize")
            # we have the version table already, make sure the version matches
            Session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
            version = _Meta.get_version(Session)
            if not DB_VERSION == version:
                raise Exception('Your opentuner database version {} is out of date with the current version {}'.format(version, DB_VERSION))
    finally:
        # always release the check connection (the original leaked it on the
        # success path and on the version-mismatch raise)
        connection.close()
    Base.metadata.create_all(engine)
    Session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
    # mark database with current version
    _Meta.add_version(Session, DB_VERSION)
    Session.commit()
    return engine, Session
def test_all_htmlpopups(self):
    # Smoke-test the htmlPopup endpoint for every queryable top-level prod
    # layer in every supported language.
    from chsdi.models import models_from_name
    from chsdi.models.bod import LayersConfig
    from sqlalchemy import distinct
    from sqlalchemy.orm import scoped_session, sessionmaker
    val = True
    DBSession = scoped_session(sessionmaker())
    valnone = None
    # only prod, queryable layers with no parent layer
    query = DBSession.query(distinct(LayersConfig.layerBodId)).filter(LayersConfig.staging == 'prod').filter(LayersConfig.queryable == val).filter(LayersConfig.parentLayerId == valnone)
    features = []
    try:
        for layer in getLayers(query):
            try:
                # one short-lived session per layer to fetch a sample feature id
                FeatDBSession = scoped_session(sessionmaker())
                models = models_from_name(layer)
                self.failUnless(models is not None and len(models) > 0, layer)
                model = models[0]
                query = FeatDBSession.query(model.primary_key_column()).limit(1)
                ID = [q[0] for q in query]
                # empty tables yield an empty list; such layers are skipped
                if ID:
                    features.append((layer, str(ID[0])))
            finally:
                FeatDBSession.close()
    finally:
        DBSession.close()
    for f in features:
        for lang in ('de', 'fr', 'it', 'rm', 'en'):
            link = '/rest/services/all/MapServer/' + f[0] + '/' + f[1] + '/htmlPopup?callback=cb&lang=' + lang
            resp = self.testapp.get(link)
            # include the failing link in the assertion message
            self.failUnless(resp.status_int == 200, link)
def __init__(self, bus, connection_string):
    """ The plugin is registered to the CherryPy engine and therefore
    is part of the bus (the engine *is* a bus) registery.

    We use this plugin to create the SA engine. At the same time,
    when the plugin starts we create the tables into the database
    using the mapped class of the global metadata.
    """
    plugins.SimplePlugin.__init__(self, bus)
    # Three independent (engine, connection string, scoped session) triples
    # are kept: online, offline and cache databases. Engines start as None
    # here -- presumably bound when the plugin starts; confirm against the
    # plugin's start() method. (Commented-out single-database leftovers
    # from an earlier revision were removed.)
    self.sa_engine_online = None
    self.connection_string_online = connection_string.connectUrlonline
    self.session_online = scoped_session(sessionmaker(autoflush=True, autocommit=False))
    self.sa_engine_offline = None
    self.connection_string_offline = connection_string.connectUrloffline
    self.session_offline = scoped_session(sessionmaker(autoflush=True, autocommit=False))
    self.sa_engine_cache = None
    self.connection_string_cache = connection_string.connectUrlcache
    self.session_cache = scoped_session(sessionmaker(autoflush=True, autocommit=False))
def __init__(self, uri=None, engine=None, **kwargs):
    """Create a new SqlAlchemyStore.

    When given a connection string (uri) or SQLAlchemy Engine (engine), this
    Store will create it's own internal SQLAlchemy Session to manage objects.
    If you do not provide a URI or Engine, your mapped object metadata must
    be bound to their own engines.
    """
    super(SqlAlchemyStore, self).__init__()
    # Guard clauses replace the original nested if/else structure.
    if "session" in kwargs:
        # pre-existing session objects are no longer supported; too many
        # issues with that approach at the moment
        raise DeprecationWarning("cannot instantiate a SqlAlchemyStore with a pre-existing session object")
    if uri and engine:
        # two conflicting ways of obtaining an engine were given
        raise ValueError("you can only provide either a connection string URI or an engine, not both")
    if uri:
        # build the engine from the connection string
        engine = create_engine(uri)
    if engine:
        # bind the internal session to whichever engine we ended up with
        self.__session = scoped_session(sessionmaker(autoflush=True, bind=engine))
    else:
        # unbound session: mapped object metadata must supply its own engine
        self.__session = scoped_session(sessionmaker(autoflush=True))
def autoupdate():
    """ Autoupdate data layer about new models in engines. """
    # NOTE: the original also built scoped_session(Backend.instance()
    # .get_sessionmaker) here and immediately discarded the result -- a
    # no-op (it also passed the factory attribute uncalled) -- so that
    # dead statement has been removed.
    Backend.instance().get_base().metadata.create_all(
        Backend.instance().get_engine())
def create_security_group_rule_bulk_native(self, context, security_group_rule):
    """Create a batch of security group rules inside one transaction.

    Validates the batch, checks the parent security group exists, rejects
    duplicate rules, then inserts each rule row and returns the resulting
    rule dicts.
    """
    r = security_group_rule['security_group_rules']
    # NOTE: a discarded scoped_session(context.session) call was removed
    # here -- it constructed an object and threw it away (scoped_session
    # also expects a session *factory*, not a session instance).
    security_group_id = self._validate_security_group_rules(
        context, security_group_rule)
    with context.session.begin(subtransactions=True):
        if not self.get_security_group(context, security_group_id):
            raise ext_sg.SecurityGroupNotFound(id=security_group_id)
        self._check_for_duplicate_rules(context, r)
        ret = []
        for rule_dict in r:
            rule = rule_dict['security_group_rule']
            tenant_id = self._get_tenant_id_for_create(context, rule)
            db = SecurityGroupRule(
                id=uuidutils.generate_uuid(), tenant_id=tenant_id,
                security_group_id=rule['security_group_id'],
                direction=rule['direction'],
                remote_group_id=rule.get('remote_group_id'),
                ethertype=rule['ethertype'],
                protocol=rule['protocol'],
                port_range_min=rule['port_range_min'],
                port_range_max=rule['port_range_max'],
                remote_ip_prefix=rule.get('remote_ip_prefix'))
            context.session.add(db)
            ret.append(self._make_security_group_rule_dict(db))
        return ret
def init_sqlalchemy(settings):
    """Configure the master (and optional slave) SQLAlchemy sessions from a
    settings mapping; slaves serve BaseObject.query when configured."""
    # master
    kwargs = {}
    raw_kwargs = settings.get('sqlalchemy.connect_kwargs')
    if raw_kwargs is not None:
        # connect kwargs may arrive JSON-encoded
        if isinstance(raw_kwargs, str):
            raw_kwargs = json.loads(raw_kwargs)
        kwargs.update(raw_kwargs)
    engine = create_engine(settings['sqlalchemy.url'], **kwargs)
    meta.Session = orm.scoped_session(
        orm.sessionmaker(bind=engine, extension=ZopeTransactionExtension()))
    meta.metadata.bind = engine
    # slaves
    slaves = []
    for url in settings.get('sqlalchemy.slaves', []):
        slave_engine = create_engine(url, **kwargs)
        slave_sm = orm.sessionmaker(bind=slave_engine,
                                    extension=ZopeTransactionExtension())
        slaves.append(orm.scoped_session(slave_sm))
    # route BaseObject.query through a randomly chosen slave when any are
    # configured, otherwise through the master session
    if slaves:
        chosen = random.choice(slaves)
        meta.BaseObject.query = chosen.query_property(orm.Query)
    else:
        meta.BaseObject.query = meta.Session.query_property(orm.Query)
def get_session():
    """Creates a new database session for instant use.

    Returns a ``(session, engine)`` tuple; the caller owns both and is
    responsible for closing the session / disposing the engine.
    """
    engine = create_engine(settings.DATASTORE_URI)
    Session = sessionmaker(bind=engine)
    # NOTE: the original wrapped Session in scoped_session() and discarded
    # the result -- a no-op that has been removed.
    return (Session(), engine)
def test_default_constructor_state_not_shared(self):
    # Mapping a class through one scoped_session's session-aware mapper
    # (legacy scoped_session.mapper API) must not leak the generated
    # kwargs-accepting constructor into classes that define their own
    # __init__, nor share state across registries.
    scope = scoped_session(sa.orm.sessionmaker())

    class A(object):
        pass

    class B(object):
        def __init__(self):
            pass

    scope.mapper(A, table1)
    scope.mapper(B, table2)
    # A gets the mapper-provided kwargs constructor; B keeps its own
    # zero-argument __init__ and must reject kwargs.
    A(foo='bar')
    assert_raises(TypeError, B, foo='bar')
    # Same check with the plain/explicit-__init__ declaration order
    # reversed, on a brand-new registry.
    scope = scoped_session(sa.orm.sessionmaker())

    class C(object):
        def __init__(self):
            pass

    class D(object):
        pass

    scope.mapper(C, table1)
    scope.mapper(D, table2)
    assert_raises(TypeError, C, foo='bar')
    D(foo='bar')
def __init__(self, name=None):
    # Build an SQLAlchemy engine (plus an optional read-only engine) from
    # the Django-style settings.DATABASES entry *name* (default 'default').
    if not name:
        name = 'default'
    data = {}
    if hasattr(settings, 'DATABASES') and \
       name in settings.DATABASES:
        data = settings.DATABASES[name]
    if data.get('ENGINE', '') == '':
        raise ImproperlyConfigured('''\
The database ORM connection requires, at minimum, an engine type.''')
    # Keep only the last component of dotted Django backend paths.
    if '.' in data['ENGINE']:
        data['ENGINE'] = data['ENGINE'].rsplit('.', 1)[-1]
    # django needs sqlite3 but sqlalchemy references sqlite
    if data['ENGINE'] == 'sqlite3':
        data['ENGINE'] = 'sqlite'
    self.engine = self._create_engine(data)
    # READONLY_-prefixed keys override their base counterparts to describe
    # a separate read-only engine (Python 2 iteritems).
    ro_values = dict([(k[9:], v) for k, v in data.iteritems()
                      if k.startswith('READONLY_')])
    if len(ro_values):
        ro_data = dict(data)
        ro_data.update(ro_values)
        self.readonly_engine = self._create_engine(ro_data)
    self.metadata = MetaData(self.engine)
    self.Base = declarative_base(metadata=self.metadata)
    # readonly_engine only exists when READONLY_ overrides were present
    if hasattr(self, 'readonly_engine'):
        self._readonly_sessionmaker = \
            scoped_session(sessionmaker(bind=self.readonly_engine))
    self._sessionmaker = scoped_session(sessionmaker(bind=self.engine))
def _rasp_session(engine=None):
    """Return a new scoped session factory (autoflush/autocommit disabled),
    bound to *engine* when one is provided."""
    factory_kwargs = {'autoflush': False, 'autocommit': False}
    if engine is not None:
        factory_kwargs['bind'] = engine
    return scoped_session(sessionmaker(**factory_kwargs))
def init_rdbms(engine):
    """Call me before using any RDBMSStore class in the model."""
    # Autocommitting session factory published module-wide via rdbms.
    sm = orm.sessionmaker(autocommit=True, bind=engine)
    rdbms.sqlEngine = engine
    rdbms.sqlSession = orm.scoped_session(sm)
    # NOTE(review): these assign function locals, not module globals -- a
    # `global` statement is presumably missing if the intent was to also
    # set this module's sqlEngine/sqlSession; confirm before relying on it.
    sqlEngine = engine
    sqlSession = orm.scoped_session(sm)
    assert(sqlEngine)
    assert(sqlSession)
def create_group():
    '''
    Endpoint that allows the user to generate a group.
    GET request generates the form required to create the group.
    POST request makes the DB calls to create the group.
    '''
    # text() gives us bound parameters instead of string-built SQL
    from sqlalchemy import text
    global USE_PROTOBUFF
    if request.method == 'POST':
        if USE_PROTOBUFF:
            GrpCreateClient = message_pb2.UsrGrp()
            result = base64.b64decode(request.form['protoString'])
            GrpCreateClient.ParseFromString(result)
            name = GrpCreateClient.name
            user_ids = GrpCreateClient.user_ids
        else:
            name = request.form['name']
            user_ids = request.form['user_ids']
            # parse string of users into integer array
            user_ids = user_ids.split(',')
        user_ids = [int(user_id) for user_id in user_ids]
        user_ids.append(int(current_user.id))
        user_ids.sort()
        # make sure that the group has more than 2 people
        if len(user_ids) <= 2:
            flash('Groups must be made with more than 2 people')
            # FIX: the original returned json.dumps({}, 403, {...}) which
            # passed 403/headers as json.dumps positional args and always
            # produced a plain 200 string; return a Flask response tuple.
            return json.dumps({}), 403, {'ContentType': 'application/json'}
        user_ids = str(user_ids)
        # make sure the group has not been created yet
        # FIX: parameterized query -- the previous string concatenation of
        # user-controlled values into the SQL was an injection vector.
        Session = scoped_session(sessionmaker(bind=engine))
        s = Session()
        result_proxy = s.execute(
            text('SELECT * FROM groups WHERE user_ids = :uids LIMIT 1'),
            {'uids': user_ids})
        # FIX: fetch before closing the session; the original fetched after
        # close(), which can fail once the underlying cursor is released.
        result = result_proxy.fetchone()
        s.close()
        if result is not None:
            flash('That group has already been created')
            return json.dumps({}), 403, {'ContentType': 'application/json'}
        # create group
        try:
            Session = scoped_session(sessionmaker(bind=engine))
            s = Session()
            s.execute(
                text('INSERT INTO groups(groupname, user_ids) VALUES (:name, :uids)'),
                {'name': name, 'uids': user_ids})
            s.commit()
            s.close()
            return json.dumps({}), 200, {'ContentType': 'application/json'}
        except Exception:
            # creation failed; report a generic error to the client
            return json.dumps({}), 403, {'ContentType': 'application/json'}
    else:
        return render_template(
            'create_group.html'
        )
def __new__(cls, *args, **kwarg):
    """Lazily build and memoize one scoped session for the whole class.

    The session autocommits, never autoflushes and keeps objects alive
    after commit; when settings.CACHED_QUERY is enabled the cache-aware
    CachedQuery class is used for all queries.
    """
    if cls._session:
        return cls._session
    engine = create_engine(settings.DATABASE_ENGINE,
                           pool_size=settings.DATABASE_POOL_SIZE,
                           pool_recycle=300,
                           proxy=TimerProxy())
    session_kwargs = dict(autocommit=True, autoflush=False,
                          expire_on_commit=False, bind=engine)
    if hasattr(settings, 'CACHED_QUERY') and settings.CACHED_QUERY:
        # opt-in query-result caching
        from torneira.cache import CachedQuery
        session_kwargs['query_cls'] = CachedQuery
    cls._session = scoped_session(sessionmaker(**session_kwargs))
    return cls._session
def setUp(self):
    """Create the test WSGI app plus two independent non-caching DB
    sessions, then flush any pending state."""
    self.app = TestApp(build_app().wsgifunc())

    def fresh_session():
        # each call yields a brand-new session so the two handles do not
        # share identity-map state
        factory = orm.scoped_session(
            orm.sessionmaker(bind=engine, query_cls=NoCacheQuery))
        return factory()

    self.db = fresh_session()
    self.db2 = fresh_session()
    self.default_headers = {"Content-Type": "application/json"}
    flush()
def session(self):
    """Yield a live session bound to ``self.engine`` and tear it down
    afterwards.

    FIX: the original finally block called
    ``scoped_session(sessionmaker()).remove()`` -- that builds a *new*
    registry and removes from it, so the session actually yielded was
    never closed (a connection leak). We keep the registry that produced
    the session and remove from it.
    """
    registry = scoped_session(sessionmaker())
    registry.configure(bind=self.engine)
    session = registry()
    # cheap connectivity probe before handing the session out
    session.execute("SELECT 1").scalar()
    try:
        yield session
    finally:
        # closes and discards the session created above
        registry.remove()
def __init__(self, connection_string):
    """Keep separate (engine, connection string, scoped session) triples
    for the online and offline databases; engines start unset (None)."""
    self.log = logging.getLogger(__name__)
    # identical wiring for both databases, driven by a small table
    for suffix, url in (('online', connection_string.connectUrlonline),
                        ('offline', connection_string.connectUrloffline)):
        setattr(self, 'sa_engine_' + suffix, None)
        setattr(self, 'connection_string_' + suffix, url)
        setattr(self, 'session_' + suffix,
                scoped_session(sessionmaker(autoflush=True, autocommit=False)))
def init_from_config(settings, prefix="sqlalchemy.", use_transaction=True):
    """
    As above but use a settings dict, eg from a Pyramid config
    """
    global Session, engine, Base
    engine = sqlalchemy.engine_from_config(settings, prefix)
    session_kwargs = {'bind': engine}
    if use_transaction:
        # integrate the session with the transaction package
        from zope.sqlalchemy import ZopeTransactionExtension
        session_kwargs['extension'] = ZopeTransactionExtension()
    Session = scoped_session(sessionmaker(**session_kwargs))
    Base.metadata.bind = engine
    init_modules()
def create_security_group_rule_bulk_native(self, context, security_group_rules):
    """Create several security group rules inside one transaction.

    Validates the batch, verifies the parent security group exists,
    rejects duplicates, then creates each rule and returns the resulting
    rule dicts.
    """
    rules = security_group_rules["security_group_rules"]
    # NOTE: a discarded scoped_session(context.session) call was removed
    # here -- it constructed an object and threw it away (scoped_session
    # also expects a session *factory*, not a session instance).
    security_group_id = self._validate_security_group_rules(context, security_group_rules)
    with context.session.begin(subtransactions=True):
        if not self.get_security_group(context, security_group_id):
            raise ext_sg.SecurityGroupNotFound(id=security_group_id)
        self._check_for_duplicate_rules(context, rules)
        ret = []
        for rule_dict in rules:
            # per-rule validation already happened above
            res_rule_dict = self._create_security_group_rule(context, rule_dict, validate=False)
            ret.append(res_rule_dict)
        return ret
def __init__(self, name=None):
    """Wire up engines, session factories and the declarative base for the
    connection settings registered under *name* (default 'default')."""
    data, ro_data = get_connection_settings(name or 'default')
    self.engine = self._create_engine(data)
    self._sessionmaker = scoped_session(
        sessionmaker(bind=self.engine, autoflush=False))
    if len(ro_data):
        # a read-only mirror was configured: give it its own engine/factory
        self.readonly_engine = self._create_engine(ro_data)
        self._readonly_sessionmaker = scoped_session(
            sessionmaker(bind=self.readonly_engine))
    self.metadata = MetaData(self.engine)
    self.Base = get_declarative_base()
    self.Base.metadata = self.metadata
def handle_startup(self):
    # Connect to the user database, run this module's query, and record
    # SUCCESS/FAILED (plus the failing stage) in self.m_results.
    self.lg("Start Handle Module Startup", 5)
    error_msg = ""
    self.m_state = "Results"
    try:
        import sqlalchemy
        from sqlalchemy import Column, Integer, String, ForeignKey, Table, create_engine, MetaData, Date, DateTime, Float, Boolean
        from sqlalchemy.orm import relationship, backref, scoped_session, sessionmaker, relation
        from sqlalchemy.ext.declarative import declarative_base
        self.lg("Connecting to User DB(" + str(self.m_db_connection_str) + ")", 5)
        # error_msg tracks the most recent stage so the except block can
        # report where the failure happened
        error_msg = "Creating Engine Base Failed"
        engine = sqlalchemy.create_engine(self.m_db_connection_str)
        error_msg = "Connecting Engine Failed"
        connection = engine.connect()
        error_msg = "Creating Scoped Session Failed"
        session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
        self.execute_query(session)
        self.m_results["Status"] = "SUCCESS"
        self.m_results["Error"] = ""
    except Exception,e:
        # Python 2 except syntax; failures are logged, never re-raised
        self.lg("Getting Developer Accounts Failed Error(" + str(error_msg) + ") Exception(" + str(e) + ")", 5)
        self.m_results["Status"] = "FAILED"
        self.m_results["Error"] = error_msg
def create_tables(db_url, alembic_ini=None, debug=False):
    """ Create the tables in the database using the information from the
    url obtained.

    :arg db_url, URL used to connect to the database. The URL contains
        information with regards to the database engine, the host to
        connect to, the user and password and the database name.
        ie: <engine>://<user>:<password>@<host>/<dbname>
    :kwarg alembic_ini, path to the alembic ini file. This is necessary
        to be able to use alembic correctly, but not for the unit-tests.
    :kwarg debug, a boolean specifying wether we should have the verbose
        output of sqlalchemy or not.
    :return a session that can be used to query the database.
    """
    engine = sa.create_engine(db_url, echo=debug)
    BASE.metadata.create_all(engine)
    if alembic_ini is not None:  # pragma: no cover
        # load the Alembic configuration and stamp the version table with
        # the most recent revision
        from alembic import command
        from alembic.config import Config
        command.stamp(Config(alembic_ini), "head")
    return scoped_session(sessionmaker(bind=engine))
def init_sqlalchemy(self, scheme, connection):
    # Build the MySQL engine/session pair for *connection*, installing a
    # pool-checkout hook that forces reconnection when the pooled
    # connection has gone away. Returns a dict of the created objects,
    # or None (after printing) on failure.
    try:
        import sqlalchemy
        from sqlalchemy import create_engine, MetaData
        from sqlalchemy.orm import scoped_session, sessionmaker
        from torweb.db import CacheQuery
        import _mysql_exceptions
        from sqlalchemy import event
        from sqlalchemy.exc import DisconnectionError

        def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
            # Ping the connection on pool checkout; raising
            # DisconnectionError makes SQLAlchemy retry with a fresh one.
            try:
                dbapi_conn.cursor().execute('select now()')
            except _mysql_exceptions.OperationalError:
                raise DisconnectionError
        engine = create_engine(
            connection,
            convert_unicode=True,
            encoding="utf-8",
            pool_recycle=3600*7,
            #echo_pool=True,
            echo=False,
        )
        event.listen(engine, 'checkout', my_on_checkout)
        metadata = MetaData(bind=engine)
        session = scoped_session(sessionmaker(bind=engine, query_cls=CacheQuery))
        sqlalchemy_sessions = [session]
        DB_Session = sessionmaker(bind=engine)
        return {"metadata":metadata, "session":session, "sqlalchemy_sessions":sqlalchemy_sessions}
        #setattr(self.app, 'metadata', metadata)
        #setattr(self.app, scheme.get('sqlalchemy', 'session'), session)
        #setattr(self.app, 'sqlalchemy_sessions', sqlalchemy_sessions)
    except Exception as e:
        # Python 2 print statement; setup failures are swallowed
        print e
def session(self):
    """Return the cached session, creating it on first use and pinning
    search_path to the configured schema plus public."""
    if self._session:
        return self._session
    factory = scoped_session(sessionmaker(bind=self._engine))
    self._session = factory()
    # scope unqualified table names to the configured schema
    self._session.execute('set search_path='+_schema+', public')
    return self._session
def setup_class(cls):
    # Build the datastore test fixture: seed data, a datastore resource,
    # an organization (for private datasets), the expected query results,
    # and a raw session on the datastore write engine.
    if not tests.is_datastore_supported():
        raise nose.SkipTest("Datastore not supported")
    plugin = p.load("datastore")
    if plugin.legacy_mode:
        # make sure we undo adding the plugin
        p.unload("datastore")
        raise nose.SkipTest("SQL tests are not supported in legacy mode")
    ctd.CreateTestData.create()
    cls.sysadmin_user = model.User.get("testsysadmin")
    cls.normal_user = model.User.get("annafan")
    cls.dataset = model.Package.get("annakarenina")
    resource = cls.dataset.resources[0]
    # payload for datastore_create: two records, one field with a
    # non-ascii id, plus a "books4" alias
    cls.data = {
        "resource_id": resource.id,
        "aliases": "books4",
        "fields": [{"id": u"b\xfck", "type": "text"},
                   {"id": "author", "type": "text"},
                   {"id": "published"}],
        "records": [
            {
                u"b\xfck": "annakarenina",
                "author": "tolstoy",
                "published": "2005-03-01",
                "nested": ["b", {"moo": "moo"}],
            },
            {u"b\xfck": "warandpeace",
             "author": "tolstoy",
             "nested": {"a": "b"}},
        ],
    }
    postparams = "%s=1" % json.dumps(cls.data)
    auth = {"Authorization": str(cls.sysadmin_user.apikey)}
    res = cls.app.post("/api/action/datastore_create",
                       params=postparams,
                       extra_environ=auth)
    res_dict = json.loads(res.body)
    assert res_dict["success"] is True
    # Make an organization, because private datasets must belong to one.
    cls.organization = tests.call_action_api(
        cls.app, "organization_create",
        name="test_org",
        apikey=cls.sysadmin_user.apikey
    )
    # Records as the datastore is expected to return them, including the
    # autogenerated _id and _full_text search vectors.
    cls.expected_records = [
        {
            u"_full_text": u"'annakarenina':1 'b':3 'moo':4 'tolstoy':2",
            u"_id": 1,
            u"author": u"tolstoy",
            u"b\xfck": u"annakarenina",
            u"nested": [u"b", {u"moo": u"moo"}],
            u"published": u"2005-03-01T00:00:00",
        },
        {
            u"_full_text": u"'b':3 'tolstoy':2 'warandpeac':1",
            u"_id": 2,
            u"author": u"tolstoy",
            u"b\xfck": u"warandpeace",
            u"nested": {u"a": u"b"},
            u"published": None,
        },
    ]
    cls.expected_join_results = [{u"first": 1, u"second": 1},
                                 {u"first": 1, u"second": 2}]
    # Direct session on the datastore write engine for raw SQL assertions.
    engine = db._get_engine(
        {"connection_url": pylons.config["ckan.datastore.write_url"]})
    cls.Session = orm.scoped_session(orm.sessionmaker(bind=engine))
def upgrade(migrate_engine):
    # Migration: split the legacy 'qual' extension into 'qual454' /
    # 'qualsolexa' based on the dataset peek, using a temporary index on
    # history_dataset_association.extension to speed up the updates.
    # All failures are logged and ignored so the migration keeps going.
    display_migration_details()
    metadata.bind = migrate_engine
    db_session = scoped_session(
        sessionmaker(
            bind=migrate_engine,
            autoflush=False,
            autocommit=True
        )
    )
    HistoryDatasetAssociation_table = Table("history_dataset_association", metadata, autoload=True)
    # Load existing tables
    metadata.reflect()
    # Temporary index on the extension column for the UPDATEs below.
    i = Index('ix_hda_extension', HistoryDatasetAssociation_table.c.extension)
    try:
        i.create()
    except Exception as e:
        log.debug("Adding index 'ix_hda_extension' to history_dataset_association table failed: %s" % (str(e)))
    # 'qual' rows whose peek starts with '>' become 'qual454'.
    cmd = "UPDATE history_dataset_association SET extension = 'qual454' WHERE extension = 'qual' and peek like \'>%%\'"
    try:
        db_session.execute(cmd)
    except Exception as e:
        log.debug("Resetting extension qual to qual454 in history_dataset_association failed: %s" % (str(e)))
    # Remaining 'qual' rows (peek NOT starting with '>') become 'qualsolexa'.
    cmd = "UPDATE history_dataset_association SET extension = 'qualsolexa' WHERE extension = 'qual' and peek not like \'>%%\'"
    try:
        db_session.execute(cmd)
    except Exception as e:
        log.debug("Resetting extension qual to qualsolexa in history_dataset_association failed: %s" % (str(e)))
    # Drop the temporary index again.
    try:
        i.drop()
    except Exception as e:
        log.debug("Dropping index 'ix_hda_extension' to history_dataset_association table failed: %s" % (str(e)))
def init(**config):
    # Initialize the TagOpsDB SQLAlchemy layer once (unless force=True),
    # optionally creating the database and all tables first.
    if initialized(**config) and not config.get('force'):
        return
    attrs = _module_attrs(config.pop('module', None))
    attrs['_initialized'] = True
    # 'url' describes the DB location; whatever remains in *config* after
    # the pops below is passed straight to create_engine.
    url = config.pop('url', {})
    url.setdefault('drivername', 'mysql+oursql')
    url.setdefault('database', 'TagOpsDB')
    do_create = config.pop('create', False)
    if do_create:
        # connect without a database selected so we can CREATE DATABASE
        create_url = url.copy()
        db_name = create_url.pop('database')
        engine = sqlalchemy.create_engine(URL(**create_url), **config)
        engine.execute('CREATE DATABASE IF NOT EXISTS %s' % db_name)
    engine = sqlalchemy.create_engine(
        URL(**url),
        **config
    )
    attrs['Base'].metadata.bind = engine
    if do_create:
        attrs['Base'].metadata.create_all(engine)
    # shared scoped session published both on the Base and the module
    attrs['Base'].Session = attrs['Session'] = scoped_session(sessionmaker())
    attrs['Session'].configure(bind=engine)
def main(argv=sys.argv):
    # Scan all 'testing' updates without a pending request and comment on
    # those that now meet their release's testing requirements.
    # Usage: <script> <config_uri>
    if len(argv) != 2:
        usage(argv)
    config_uri = argv[1]
    settings = get_appsettings(config_uri)
    engine = engine_from_config(settings, 'sqlalchemy.')
    Session = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
    Session.configure(bind=engine)
    db = Session()
    # transaction.manager commits on clean exit, aborts on exception
    with transaction.manager:
        testing = db.query(Update).filter_by(status=UpdateStatus.testing,
                                             request=None)
        for update in testing:
            # If this release does not have any testing requirements, skip it
            if not update.release.mandatory_days_in_testing:
                print('%s doesn\'t have mandatory days in testing' % update.release.name)
                continue
            # If this has already met testing requirements, skip it
            if update.met_testing_requirements:
                continue
            if update.meets_testing_requirements:
                # leave the configured approval comment as user 'bodhi'
                print('%s now meets testing requirements' % update.title)
                text = config.get('testing_approval_msg') % update.days_in_testing
                update.comment(db, text, author='bodhi')
def register(cls, name, scoped):
    """Register and return the session factory stored under *name*: a
    transaction-aware scoped_session when *scoped* is true, otherwise a
    plain sessionmaker."""
    if scoped:
        factory = scoped_session(sessionmaker(
            extension=ZopeTransactionExtension()))
    else:
        factory = sessionmaker()
    cls.sessions[name] = factory
    return factory
Lazy SQLAlchemy setup
http://flask.pocoo.org/snippets/22/
"""
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(os.path.dirname(__file__))))

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, create_session

# The engine is created lazily: db_session binds to whatever `engine`
# holds at call time, so init_engine() may run after import.
engine = None
db_session = scoped_session(lambda: create_session(bind=engine))

def init_engine(uri, **kwargs):
    # Create and memoize the module-global engine; returned for convenience.
    global engine
    engine = create_engine(uri, **kwargs)
    return engine

def create_app(config):
    # Application factory: configure Flask from *config* and set up the
    # engine. NOTE(review): Flask is not imported in this chunk and no app
    # is returned -- presumably truncated; confirm against the full file.
    app = Flask(__name__)
    app.config.from_pyfile(config)
    init_engine(app.config['DATABASE_URI'])
# -*- coding: utf-8 -*- """The application's model objects""" import zope.sqlalchemy from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.ext.declarative import declarative_base # Global session manager: DBSession() returns the Thread-local # session object appropriate for the current web request. maker = sessionmaker(autoflush=True, autocommit=False) DBSession = scoped_session(maker) zope.sqlalchemy.register(DBSession) # Base class for all of our model classes: By default, the data model is # defined with SQLAlchemy's declarative extension, but if you need more # control, you can switch to the traditional method. DeclarativeBase = declarative_base() # There are two convenient ways for you to spare some typing. # You can have a query property on all your model classes by doing this: # DeclarativeBase.query = DBSession.query_property() # Or you can use a session-aware mapper as it was used in TurboGears 1: # DeclarativeBase = declarative_base(mapper=DBSession.mapper) # Global metadata. # The default metadata is the one from the declarative base. metadata = DeclarativeBase.metadata # If you have multiple databases with overlapping table names, you'll need a # metadata for each database. Feel free to rename 'metadata2'. # from sqlalchemy import MetaData
def Session():
    """Yield a scoped session factory bound to the write engine, closing
    it once the consumer is done (generator/fixture style)."""
    factory = orm.scoped_session(orm.sessionmaker(bind=get_write_engine()))
    yield factory
    factory.close()
# coding: utf-8 from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.ext.declarative import declarative_base import datetime import os databese_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data.db') #ファイル名「data.db」、絶対パス(abspath)でファイルの保存先を指定 engine = create_engine(os.environ.get('DATABASE_URL') or 'sqlite:///' + databese_file, convert_unicode=True , echo=True) #どのDBを使うか: 本番環境はDATABASE_URL(herokuのPostgreSQL),ローカルは「sqlite:///」、echo=True:作業時にSQLを出力するか db_session = scoped_session( #オプションを記載(いじらなくてOK) sessionmaker( autocommit = False, #自動でコミット:しない autoflush = False, #自動反映: しない bind = engine ) ) Base = declarative_base() Base.query = db_session.query_property() def init_db(): #DBの初期化 import assets.models Base.metadata.create_all(bind=engine) def read_data(): #初期データの読込(csv) from assets import models #from assets.database import db_session import pandas as pd import datetime
app = Flask(__name__)
app.secret_key = os.urandom(24)

# Check for environment variable
if not os.getenv("DATABASE_URL"):
    raise RuntimeError("DATABASE_URL is not set")

# Configure session to use filesystem
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"

# Create the Session object by passing it the application
# Important: firstly i set up the "app" and then i do Session(app)
Session(app)

# Set up database
# An engine is a common interface from sqlalchemy
engine = create_engine(os.getenv("DATABASE_URL"))
db = scoped_session(sessionmaker(bind=engine))

@app.route("/")
def index():
    # Logged-in users go to their home page, everyone else to login.
    if 'user_name' in session:
        user_name = session['user_name']
        return redirect(url_for('home', user_name=user_name))
    return redirect(url_for('login'))

# wherever I can divide each function in GET and POST I do it.
@app.route('/login', methods = ['GET', 'POST'])
def login():
    # GET renders the form; POST processes the submitted credentials.
    # NOTE(review): this definition is truncated in this chunk.
    if request.method == 'GET':
        return render_template('login.html')
    else:
        user_name = request.form.get("user_name")
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

from eNMS.setup import settings

# Engine configured from the eNMS settings; the MySQL dialect additionally
# gets explicit connection-pool sizing.
DATABASE_URL = settings["database"]["url"]
DIALECT = DATABASE_URL.split(":")[0]
engine_parameters = {
    "convert_unicode": True,
    "pool_pre_ping": True,
    "pool_recycle": 3600,
}
if DIALECT == "mysql":
    engine_parameters["max_overflow"] = settings["database"]["max_overflow"]
    engine_parameters["pool_size"] = settings["database"]["pool_size"]
engine = create_engine(DATABASE_URL, **engine_parameters)
Session = scoped_session(sessionmaker(autoflush=False, bind=engine))
Base = declarative_base()
class RetryingQuery(_Query):
    # Query subclass whose common terminal methods are wrapped in the
    # project's @retry decorator so transient DB errors get retried.
    @retry
    def all(self):
        return super().all()

    @retry
    def count(self):
        return super().count()

    @retry
    def first(self):
        return super().first()

#db_session = scoped_session(sessionmaker(class_=RoutingSession))#, query_cls=RetryingQuery))
db_session = scoped_session(sessionmaker(bind=_engine, query_cls=RetryingQuery))

Base = declarative_base()

#set the shared redis cache for misc stuff
# (the [8:] slice strips the URL scheme prefix; r is None when the
# CACHE_REDIS_URL config is unset)
r = redis.Redis(
    host=app.config["CACHE_REDIS_URL"][8:],
    decode_responses=True,
    ssl_cert_reqs=None,
    connection_pool=redispool) if app.config["CACHE_REDIS_URL"] else None

#import and bind chat function
#the if statement is needed because chat includes its own db session
#and if it's not used then every worker connection will spawn a new db session
#from ruqqus.chat.chat_routes import *
from sqlalchemy import create_engine, asc
from sqlalchemy.orm import sessionmaker, exc
from sqlalchemy.orm import scoped_session
from catalog.dbsetup import Base, User, Catalog, Category, Item
from catalog.models import CatalogModel
from flask import session as appsession
from catalog import catalogvars

# Connect to Database and create database session
engine = create_engine('postgresql://*****:*****@localhost/itemcatalog')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
# module-wide session shared by all helper functions below
session = scoped_session(DBSession)

def get_categories():
    ''' get all categories '''
    catalog_id = CatalogModel.get_catalog_id()
    categories = session.query(Category).filter_by(catalog_id=catalog_id).all()
    return categories

def get_category(category_name):
    ''' get category using category name '''
    # NOTE(review): this definition is truncated in this chunk.
    catalog_id = CatalogModel.get_catalog_id()
    category = session.query(Category).filter_by(
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
from sqlalchemy.orm import scoped_session, sessionmaker, relationship
from sqlalchemy.ext.declarative import declarative_base

engine = create_engine('sqlite:///atividades.db', convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False, bind=engine))
Base = declarative_base()
# make Model.query available on every declarative class
Base.query = db_session.query_property()

class Pessoas(Base):
    # "People" table: id, indexed name, age; rows can save/delete
    # themselves through the module-level session.
    __tablename__='pessoas'
    id = Column(Integer, primary_key=True)
    nome = Column(String(40), index=True)
    idade = Column(Integer)

    def __repr__(self):
        return '<Pessoa {}>'.format(self.nome)

    def save(self):
        # add-and-commit convenience for this row
        db_session.add(self)
        db_session.commit()

    def delete(self):
        # delete-and-commit convenience for this row
        db_session.delete(self)
        db_session.commit()

class Atividades(Base):
    # "Activities" table. NOTE(review): truncated in this chunk.
    __tablename__='atividades'
    id = Column(Integer, primary_key=True)
def __init__(self, app):
    """Create the SQLAlchemy engine and scoped session for *app*.

    Reads DATABASE_URI and DATABASE_ECHO from the app config; both keys
    are required (a missing key raises KeyError). A commented-out
    alternative using ``app.config.get("DATABASE_ECHO", False)`` was dead
    code and has been removed.
    """
    super().__init__(app)
    self.engine = create_engine(
        app.config["DATABASE_URI"],
        echo=app.config["DATABASE_ECHO"]
    )
    self.session = scoped_session(sessionmaker(bind=self.engine))
import os #operating system library from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker engine = create_engine( os.getenv("DATABASE_URL") ) #creating a database engine "an object created by SQL alchemy that is used to manage connections to the database db = scoped_session( sessionmaker(bind=engine) ) #once we take our web application to the internet and we have multiple people that are simultaneously trying to use our website at the same time, we wanna make sure that the stuff that person A is doing with the database is kept separate from the stuff that person B is doing with the database def main(): flights = db.execute( "SELECT origin, destination, duration FROM flights" ).fetchall( ) #feeding in SQL syntax into db.execute to say i want to run all of this SQL code for flight in flights: #running a loop print( f"{flight.origin} to {flight.destination}, {flight.duration} minutes." ) #so flight is the name of one of those individual rows. to get just the origin of that row, I use flight.origin if __name__ == "__main__": main() #inside of flights(line10) i have a list of all the individual rows that came back from that SQL query.
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

DB_URI = 'sqlite:///user.db'

# Session factory bound to the SQLite engine; the scoped_session wrapper
# provides thread-local sessions.
Session = sessionmaker(autocommit=False, autoflush=False, bind=create_engine(DB_URI))
session = scoped_session(Session)
Base = declarative_base()


class Users(Base):
    """A single application user."""

    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    email = Column(String(50))

    def __init__(self, name, email):
        self.name = name
        self.email = email

    def to_json(self):
        """Return the serializable fields of this user as a plain dict."""
        return {field: getattr(self, field) for field in ('id', 'name', 'email')}


if __name__ == "__main__":
    engine = create_engine(DB_URI)
    # Drop every mapped table -- destructive; meant for resetting the db.
    Base.metadata.drop_all(engine)
#!/usr/bin/env python3
"""SQLAlchemy models for the application.

In a smaller application you could keep all models in this file, but we
assumed this will grow, so things are split up; import your sub-model at
the bottom of this module.
"""
import sqlalchemy as sa
from sqlalchemy import orm
import sqlalchemy.sql as sasql
from sqlalchemy.ext import declarative
from sqlalchemy.ext.hybrid import hybrid_property

# Application-wide session factory and scoped-session registry.
maker = orm.sessionmaker(autoflush=True, autocommit=False)
db_session = orm.scoped_session(maker)


class ReprBase(object):
    """Mixin giving mapped classes a readable representation.

    Found on the SA wiki, not included with TG.
    """

    def __repr__(self):
        # Render every non-underscore instance attribute, sorted by name,
        # as name=value pairs.
        attrs = ', '.join(
            "%s=%r" % (name, getattr(self, name))
            for name in sorted(self.__dict__)
            if not name.startswith('_')
        )
        return "%s(%s)" % (self.__class__.__name__, attrs)
import transaction
import threading
import multiprocessing
import json

from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

from codetcc.processamento.constantes import MAPABAIRROS, PATH
from codetcc.processamento.rabbitmqflow.classesflow import ControlProccess
from codetcc.processamento.tokenize import generateTokens
from codetcc.domain.models import Bairro, Localidade

engine = create_engine("postgresql+psycopg2://postgres:postgres@localhost/consultaCepRabbitmqJsonDB")
# The scoped_session registry is called immediately, so DBSession is a
# single Session instance shared by this module.
DBSession = scoped_session(sessionmaker(bind=engine))()

lock = multiprocessing.Lock()


def findLocalidade(chaveLocalidade):
    """Return the (id,) row of the Localidade with the given key, or None."""
    print("Chave Localidade: {}".format(chaveLocalidade))
    return DBSession.query(Localidade.id).filter(Localidade.chave == chaveLocalidade).first()


def createBairro(mapAttrs):
    """Build a Bairro from *mapAttrs* and attach its Localidade id."""
    localidade_id = findLocalidade(mapAttrs.get("chaveLocalidade"))
    print(localidade_id)
    bairro = Bairro(mapAttrs)
    # NOTE(review): findLocalidade returns a row tuple (or None), which is
    # assigned directly here; snippet may be truncated -- confirm upstream.
    bairro.localidade_id = localidade_id
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

# Module-wide session registry; it stays unbound until bind_engine()
# configures it with a real engine.
Session = scoped_session(sessionmaker())


def bind_engine(engine_url='sqlite://'):
    """Create an engine for *engine_url*, bind Session to it, and return it."""
    new_engine = create_engine(engine_url)
    Session.configure(bind=new_engine)
    return new_engine
def scale_cron():
    """For every user with a non-empty auth token, fetch their scale data."""
    session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
    try:
        # .all() materializes the rows, so the session can be closed
        # before we iterate.
        user_ls = session.query(auth_info_table).filter(auth_info_table.c.token != "").all()
    finally:
        # Bug fix: the session used to leak if the query raised; the
        # finally guarantees it is released either way.
        session.close()
    for user_id, access_token in user_ls:
        get_scale(access_token, user_id)
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import scoped_session

import settings


def scope_func():
    """Scope sessions to the current request, or a sentinel outside one."""
    # NOTE(review): `request` is not imported in this fragment --
    # presumably the web framework's request proxy; confirm it is in scope.
    try:
        return request._get_current_object()
    except RuntimeError:
        # No request context (scripts, startup, shells).
        return '__boo__'


def remove_session(*_args):
    """Roll back and discard the current scoped session."""
    session.rollback()
    session.remove()


def import_all():
    # alembic helper: importing registers all models on the metadata.
    from . import models


Base = declarative_base()
engine = create_engine(settings.DATABASE_URI, pool_recycle=600)
Session = sessionmaker(bind=engine)
# Sessions are created lazily, one per scope_func() scope.
session = scoped_session(
    lambda: Session(autoflush=False, expire_on_commit=False),
    scopefunc=scope_func)
# NOTE(review): this chunk starts mid-import and ends mid-function
# (create_response is cut off), so the code is left byte-identical and only
# this comment is added. remove_db_session and create_response share a
# module-level scoped session, bound at import time via the DBSESSION
# default argument.
get_next_fixture_for_player, get_player, get_recent_scores_for_player, ) from airsenal.framework.schema import (engine, SessionSquad, SessionBudget, Player) from airsenal.framework.squad import Squad from airsenal.framework.optimization_utils import ( make_optimum_transfer, make_optimum_double_transfer, ) DBSESSION = scoped_session(sessionmaker(bind=engine)) def remove_db_session(dbsession=DBSESSION): dbsession.remove() def create_response(orig_response, dbsession=DBSESSION): """ Add headers to the response """ response = jsonify(orig_response) response.headers.add( "Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, x-auth", )
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine, ForeignKey
from sqlalchemy import Column, Integer, String, Float
from sqlalchemy.orm import sessionmaker, scoped_session, relationship, backref

# echo=True logs every emitted SQL statement to stdout.
engine = create_engine("sqlite:///septa.db", echo=True)
session = scoped_session(sessionmaker(bind=engine, autocommit=False, autoflush=False))

Base = declarative_base()
Base.query = session.query_property()


############################################################
# CLASSES
class Paths(Base):
    """A priced path between two stops."""

    __tablename__ = "paths"

    id = Column(Integer, primary_key=True)
    start_stop = Column(Integer, ForeignKey('stops.id'), nullable=False)
    end_stop = Column(Integer, ForeignKey('stops.id'), nullable=False)
    cost = Column(Integer, nullable=False)


class Routes(Base):
    __tablename__ = "routes"
    # NOTE(review): snippet is truncated here; the remaining Routes
    # columns are not visible in this chunk.
def reload(self):
    """Recreate all tables and start a fresh scoped session."""
    # Ensure every mapped table exists before handing out sessions.
    Base.metadata.create_all(self.__engine)
    factory = sessionmaker(bind=self.__engine, expire_on_commit=False)
    # scoped_session provides thread-local Session instances.
    self.__session = scoped_session(factory)
# NOTE(review): this chunk ends mid-class -- player_create_spell_get has its
# signature but no visible body -- so the code is left byte-identical and
# only this comment is added. Connection settings come from environment
# variables with config-file fallbacks; SessionHolder is a scoped_session
# registry over the world-database engine.
from sqlalchemy.orm import sessionmaker, scoped_session from database.world.WorldModels import * from game.world.managers.objects.units.creature.CreatureSpellsEntry import CreatureSpellsEntry from utils.ConfigManager import * from utils.constants.MiscCodes import HighGuid DB_USER = os.getenv('MYSQL_USERNAME', config.Database.Connection.username) DB_PASSWORD = os.getenv('MYSQL_PASSWORD', config.Database.Connection.password) DB_HOST = os.getenv('MYSQL_HOST', config.Database.Connection.host) DB_WORLD_NAME = config.Database.DBNames.world_db world_db_engine = create_engine( f'mysql+pymysql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}/{DB_WORLD_NAME}?charset=utf8mb4', pool_pre_ping=True) SessionHolder = scoped_session( sessionmaker(bind=world_db_engine, autocommit=True, autoflush=True)) class WorldDatabaseManager(object): # Player stuff. @staticmethod def player_create_info_get(race, class_) -> Optional[Playercreateinfo]: world_db_session = SessionHolder() res = world_db_session.query(Playercreateinfo).filter_by( race=race, _class=class_).first() world_db_session.close() return res @staticmethod def player_create_spell_get(race, class_) -> list[PlayercreateinfoSpell]:
# NOTE(review): this chunk starts mid-import and ends mid-Table-definition
# (unbalanced parentheses), so the code is left byte-identical and only this
# comment is added. It wires a Pyramid/Zope-transaction scoped session and
# declares association tables for stops<->routes/stop_types/photos.
Geometry, ) from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import ( scoped_session, sessionmaker, relationship, ) from sqlalchemy.sql import select from zope.sqlalchemy import ZopeTransactionExtension DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())) Base = declarative_base() stopsRoutes = Table('stops_routes', Base.metadata, Column('stop_id', Integer, ForeignKey('stops.id'), nullable=False, primary_key=True), Column('route_id', Integer, ForeignKey('routes.id'), nullable=False, primary_key=True) ) stopsStopType = Table('stops_stop_types', Base.metadata, Column('stop_id', Integer, ForeignKey('stops.id'), nullable=False, primary_key=True), Column('stop_type_id', Integer, ForeignKey('stop_types.id'), nullable=False, primary_key=True) ) stopsPhotos = Table('stops_photos', Base.metadata, Column('stop_id', Integer, ForeignKey('stops.id'), nullable=False, primary_key=True), Column('photo_id', Integer, ForeignKey('photos.id'), nullable=False, primary_key=True
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, Text
from sys import argv
from pyquery import PyQuery
from lxml import etree
import urllib
import json

# Please write your own MySQL connection here.
# NOTE(review): create_engine / sessionmaker / scoped_session are not
# imported in this fragment -- presumably imported above the visible chunk.
engine = create_engine(
    'mysql://*****:*****@plover.arvixe.com:3306/vergili1_deneme?charset=utf8',
    pool_size=80,
    echo=False)
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)

Base = declarative_base()


# All text fields are declared as TEXT; short values could use String instead.
class Linkedin(Base):
    """A scraped LinkedIn profile row."""

    __tablename__ = 'linkedin'

    id = Column(Integer, primary_key=True)
    name = Column(Text(collation='utf8_general_ci'))
    heading = Column(Text(collation='utf8_general_ci'))
    location = Column(Text(collation='utf8_general_ci'))
def start() -> scoped_session:
    """Create the engine, ensure all tables exist, and return a session registry."""
    db_engine = create_engine(DB_URI, client_encoding="utf8")
    BASE.metadata.bind = db_engine
    # Create any missing tables before handing out sessions.
    BASE.metadata.create_all(db_engine)
    return scoped_session(sessionmaker(bind=db_engine, autoflush=False))
def create_single_session(cls, url, scopefunc=None):
    """Return an ``(engine, scoped_session)`` pair for *url*.

    Sessions keep loaded objects usable after commit
    (expire_on_commit=False); *scopefunc* customizes session scoping.
    """
    db_engine = create_engine(url, pool_recycle=7200, pool_size=50)
    factory = sessionmaker(expire_on_commit=False, bind=db_engine)
    return db_engine, scoped_session(factory, scopefunc=scopefunc)
def __init__(self):
    # Function-level import -- NOTE(review): presumably to avoid a circular
    # import of the application object at module load time; confirm.
    from tweeter import application
    db_engine = create_engine(application.config['SQLALCHEMY_DATABASE_URI'])
    # Store the scoped-session registry; calling it yields thread-local sessions.
    self.session_class = scoped_session(sessionmaker(bind=db_engine))
# NOTE(review): this function's original indentation was lost when the code
# was collapsed onto single lines, and its nesting (maintenance-mode stream
# loop vs. normal stream loop, placement of stopwatch.reset()) cannot be
# reconstructed with confidence, so the code is left byte-identical and only
# this comment is added. Overview: sets up the DB engine + scoped session
# and a CommentWorker, retries table creation until the DB is reachable,
# logs into Reddit, then streams inbox comment replies -- replying with a
# maintenance notice when config.maintenance is set, otherwise dispatching
# each unread comment to the worker -- with OAuth failures treated as fatal
# and all other exceptions logged and retried after 10s.
def main(): logging.info("Starting main") if config.post_to_reddit: logging.info("Warning: Bot will actually post to Reddit!") logging.info("Setting up database") killhandler = KillHandler() engine = create_engine(config.db, pool_recycle=60) sm = scoped_session(sessionmaker(bind=engine)) worker = CommentWorker(sm) while True: try: Base.metadata.create_all(engine) break except sqlalchemy.exc.OperationalError: logging.info("Database not available yet; retrying in 5s") time.sleep(5) logging.info("Setting up Reddit connection") reddit = praw.Reddit(client_id=config.client_id, client_secret=config.client_secret, username=config.username, password=config.password, user_agent=config.user_agent) stopwatch = Stopwatch() logging.info("Listening for inbox replies...") while not killhandler.killed: try: # Iterate over the latest comment replies in inbox reply_function = reddit.inbox.comment_replies if (config.maintenance): logging.info("ENTERING MAINTENANCE MODE. NO OPERATIONS WILL BE PROCESSED.") for comment in praw.models.util.stream_generator(reply_function): logging.info(f"New comment {comment}:") if comment.new: comment.reply_wrap(message.maintenance_org) comment.mark_read() for comment in praw.models.util.stream_generator(reply_function): # Measure how long since we finished the last loop iteration duration = stopwatch.measure() logging.info(f"New comment {comment}:") logging.info(f" -- retrieved in {duration:5.2f}s") if comment.new: # Process the comment worker(comment) # Mark the comment as processed comment.mark_read() else: logging.info(" -- skipping (already processed)") # Measure how long processing took duration = stopwatch.measure() logging.info(f" -- processed in {duration:5.2f}s") # Report the Reddit API call stats rem = int(reddit.auth.limits['remaining']) res = int(reddit.auth.limits['reset_timestamp'] - time.time()) logging.info(f" -- API calls remaining: {rem:3d}, resetting in {res:3d}s") # Check for termination requests if killhandler.killed: 
logging.info("Termination signal received - exiting") break stopwatch.reset() except prawcore.exceptions.OAuthException as e_creds: traceback.print_exc() logging.error(e_creds) logging.critical("Invalid login credentials. Check your .env!") logging.critical("Fatal error. Cannot continue or fix the problem. Bailing out...") exit() except Exception as e: logging.error(e) traceback.print_exc() time.sleep(10)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine, Column, Integer, String, PickleType
from sqlalchemy.orm import sessionmaker, scoped_session

engine = create_engine("sqlite:///passwords.db", echo=False)
# scoped_session guarantees thread-safety when multiple users access
# this same app concurrently.
session = scoped_session(
    sessionmaker(bind=engine, autocommit=False, autoflush=False)
)

Base = declarative_base()
Base.query = session.query_property()


### Class declarations go here
class User(Base):
    """An account holding a pickled password object and a threshold."""

    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    email = Column(String(64), nullable=False)
    # PickleType holds Python objects, serialized with pickle.
    password = Column(PickleType, nullable=False)
    threshold = Column(Integer, nullable=False)


### End class declarations
def main():
    """In case we need this for something"""
    pass
from sqlalchemy import (orm, create_engine)
from sqlalchemy.ext.declarative import declarative_base

from api.config.app_config import app_config

CONFIG = app_config()
Model = declarative_base()
engine = create_engine(CONFIG.SQLALCHEMY_DATABASE_URI)
# Why pool_recycle : http://docs.sqlalchemy.org/en/rel_0_9/dialects/mysql.html#connection-timeouts
_Session = orm.sessionmaker(autocommit=False, autoflush=True, bind=engine)
mysql_session = orm.scoped_session(_Session)
Model.metadata.bind = engine
Model.query = mysql_session.query_property()


def mongo_connect():
    """Connect mongoengine to the 'customer' database and return the connection.

    Fix: the connection used to be assigned to a local and discarded, so
    callers had no handle to close later; it is now returned (callers that
    ignored the old None return are unaffected).
    """
    from mongoengine import connect
    DB_NAME = 'customer'
    return connect(DB_NAME)


def mongo_close(mongo_connect):
    """Close the MongoDB connection object passed in.

    Bug fix: the original body was ``db.close()`` with no global ``db``
    defined anywhere in this module, so every call raised NameError. It now
    closes its argument (the parameter name is kept for interface
    compatibility, though it confusingly shadows mongo_connect above).
    """
    mongo_connect.close()
def get_db_session(engine):
    """Return a thread-local scoped-session registry bound to *engine*."""
    factory = sessionmaker(autocommit=False, autoflush=False, bind=engine)
    return scoped_session(factory)