def test_pg_dialect_use_native_unicode_from_config(self):
    """The string values "false"/"true" in an engine config dict are
    coerced into boolean ``use_native_unicode`` dialect flags."""
    for raw, expected in (("false", False), ("true", True)):
        cfg = {
            "sqlalchemy.url": testing.db.url,
            "sqlalchemy.use_native_unicode": raw,
        }
        engine = engine_from_config(cfg, _initialize=False)
        eq_(engine.dialect.use_native_unicode, expected)
def test_pg_dialect_use_native_unicode_from_config(self):
    """engine_from_config() coerces use_native_unicode strings to bools."""
    url = 'postgresql://*****:*****@somehost/test'
    for raw_value, expected in [("false", False), ("true", True)]:
        engine = engine_from_config(
            {'sqlalchemy.url': url,
             'sqlalchemy.use_native_unicode': raw_value},
            _initialize=False,
        )
        eq_(engine.dialect.use_native_unicode, expected)
def test_pg_dialect_use_native_unicode_from_config(self):
    """Config strings "false"/"true" map onto the native-unicode flag."""
    def check(flag_str, expected):
        settings = {
            'sqlalchemy.url': testing.db.url,
            'sqlalchemy.use_native_unicode': flag_str,
        }
        engine = engine_from_config(settings, _initialize=False)
        eq_(engine.dialect.use_native_unicode, expected)

    check("false", False)
    check("true", True)
def configure_sqlalchemy(settings):  # pragma: no cover
    """Configure SQLAlchemy with the given settings.

    Binds the declarative metadata to the configured engine and
    returns a ready-to-use session factory.
    """
    sa_engine = engine_from_config(settings, "sqlalchemy.")
    Base.metadata.bind = sa_engine
    session_factory = sessionmaker()
    session_factory.configure(bind=sa_engine)
    return session_factory
def setup_db_connection_from_ini(settings, prefix, metadata_func,
                                 datasource_name='', allow_schema_create=False):
    '''
    Setup a generic db connection

    @param settings: the settings mapping read from the ini file
    @param prefix: the prefix of the db-related settings keys
    @param metadata_func: factory called as
        metadata_func(schema_name=..., bind=engine) to build the metadata
    @param datasource_name: the datasource name used for zope registration
    @param allow_schema_create: determines if a schema can be created
    '''
    extra = {}
    # Without an explicit pool size setting, disable pooling entirely.
    if prefix + 'pool_size' not in settings.keys():
        extra['poolclass'] = NullPool
    # schema_name is consumed here; it must not be passed through to
    # engine_from_config, hence the pop().
    schema_key = prefix + 'schema_name'
    schema_name = settings.get(schema_key)
    settings.pop(schema_key, None)
    engine = engine_from_config(settings, prefix, **extra)
    metadata = None
    if metadata_func:
        metadata = metadata_func(schema_name=schema_name, bind=engine)
    # Create schema and its tables
    if allow_schema_create is True:
        connection = engine.connect()
        try:
            connection.execute(CreateSchema(metadata.schema))
        except DBAPIError:
            # catch exception if schema already exist
            pass
        finally:
            connection.close()
        # issue CREATEs only for tables that are not present
        # NOTE(review): placement of create_all inside this branch was
        # reconstructed from a collapsed source line — confirm; outside the
        # branch it would crash when metadata is None.
        metadata.create_all(bind=engine, checkfirst=True)
    # zope registration
    dbUtil = DbUtil(engine=engine, metadata=metadata)
    component.provideUtility(dbUtil, IDbUtil, name=datasource_name)
def test_pg_dialect_use_native_unicode_from_config(self):
    """String flags in the config dict become boolean dialect options."""
    base_url = 'postgresql://*****:*****@somehost/test'

    settings = {
        'sqlalchemy.url': base_url,
        'sqlalchemy.use_native_unicode': "false",
    }
    engine = engine_from_config(settings, _initialize=False)
    eq_(engine.dialect.use_native_unicode, False)

    settings = {
        'sqlalchemy.url': base_url,
        'sqlalchemy.use_native_unicode': "true",
    }
    engine = engine_from_config(settings, _initialize=False)
    eq_(engine.dialect.use_native_unicode, True)
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    alembic_config = config.get_section(config.config_ini_section)
    # Override whatever URL the ini section carried with the runtime one.
    alembic_config['sqlalchemy.url'] = sqlalchemy_uri

    connectable = engine_from_config(
        alembic_config,
        prefix='sqlalchemy.',
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
        )
        with context.begin_transaction():
            context.run_migrations()
def includeme(config):
    """Pyramid includeme: enable caching, create tables, bind the session."""
    config.include('pyramid_caching.ext.sqlalchemy')
    db_engine = engine_from_config(config.registry.settings)
    metadata.create_all(db_engine)
    Session.configure(bind=db_engine)
    config.register_sqlalchemy_caching(Session, Base)
    config.scan(__name__)
def _make_sa_engine(config):
    """Build SQLAlchemy engine(s) from the app's ``sqlalchemy_config``.

    Returns a single engine when a bare ``sqlalchemy_engine`` section
    exists; otherwise returns a dict mapping model classes to engines,
    one per ``sqlalchemy_engine_<pkg.Model>`` section.
    """
    from sqlalchemy.engine import engine_from_config

    sa_prefix = 'sqlalchemy_engine'
    sa_config = config.sqlalchemy_config

    if sa_prefix in sa_config:
        # Single-engine mode: one section configures everything.
        return engine_from_config(sa_config[sa_prefix], '')

    # Multi-engine mode: the section-name suffix names the model to bind.
    engine_bindings = {}
    for section_name, section in sa_config.viewitems():  # NOTE: Python 2 dict API
        if not section_name.startswith(sa_prefix):
            continue
        model_definition = section_name[len(sa_prefix) + 1:]
        pkg_name, obj_name = model_definition.rsplit('.', 1)
        package = __import__(pkg_name, globals(), locals(), [obj_name])
        engine_bindings[getattr(package, obj_name)] = engine_from_config(section)
    return engine_bindings
def make_sqlalchemy_engine(self, prefix="sqlalchemy_engine"):
    """Create SQLAlchemy engine bindings from ``self.config``.

    :param prefix: section-name prefix; a section named exactly `prefix`
        configures a single engine, while sections named
        ``<prefix>_<pkg.Model>`` each configure an engine bound to that model.
    :returns: a single engine, or a dict mapping model classes to engines.
    """
    # Hoisted to the top of the method: the original imported this name
    # only inside the single-engine branch, leaving the multi-engine
    # branch to fail with NameError when no bare `prefix` section exists.
    from sqlalchemy.engine import engine_from_config

    config = self.config.sqlalchemy_config
    if prefix in config:
        # Single-engine mode.
        section = config[prefix]
        return engine_from_config(section, '')
    else:
        # Multi-engine mode: import each model named by a section suffix.
        engine_bindings = {}
        for section_name, section in config.viewitems():  # NOTE: Python 2 dict API
            if section_name.startswith(prefix):
                model_fqn = section_name[len(prefix) + 1:]
                model_fqn_parts = model_fqn.rsplit('.', 1)
                model_mod = __import__(model_fqn_parts[0], globals(), locals(),
                                       [model_fqn_parts[1]])
                model = getattr(model_mod, model_fqn_parts[1])
                engine_bindings[model] = engine_from_config(section)
        return engine_bindings
def includeme(config):
    """Pyramid includeme: register the engine/session utility and helpers."""
    sa_engine = engine_from_config(config.registry.settings)
    util = Utility(sa_engine, sessionmaker(bind=sa_engine))
    config.registry.registerUtility(util)
    config.add_request_method(util.open_session, 'sqla_session', reify=True)
    config.add_directive('get_sqlalchemy_utility',
                         directive_get_sqlalchemy_utility,
                         action_wrap=False)
def __init__(self, app_factory, should_use_tx=True, **kwargs):
    """Collect test-harness collaborators, then open a DB connection.

    All collaborators are overridable via keyword arguments; sensible
    project defaults are used otherwise.
    """
    self.app_factory = app_factory
    self.should_use_tx = should_use_tx

    # (attribute name, kwargs key, default value)
    overridables = (
        ('base', 'base', bm.Base),
        ('catch_errors', 'catch_errors', support.CatchErrors),
        ('global_config', 'global_config', None),
        ('json_method', 'get_json', webtest.utils.json_method),
        ('session', 'session', bm.Session),
        ('test_app', 'test_app', webtest.TestApp),
        ('test_settings', 'test_settings', settings.TEST_SETTINGS),
    )
    for attr, key, default in overridables:
        setattr(self, attr, kwargs.get(key, default))

    self.has_created = False
    self.engine = engine.engine_from_config(self.test_settings,
                                            prefix='sqlalchemy.')
    self.conn = self.engine.connect()
def main(global_config, **settings):
    """Build and return the WSGI application."""
    # Setup engine.
    db_engine = engine_from_config(settings, 'sqlalchemy.')
    init_database(db_engine)

    # App Configuration
    config = Configurator(root_factory=Root, settings=settings)
    config.add_static_view('static', 'cmbalance:static')
    config.scan('cmbalance.views')

    # Return the generated WSGI application.
    return SessionFixMiddleware(config.make_wsgi_app())
def main(global_config, **settings):  # pragma: no cover
    """This function returns a Pyramid WSGI application.

    :param global_config: A :type:`dict` containing global config.
    :param settings: A :type:`dict` containing values from INI.

    :type global_config: dict
    :type settings: dict
    :rtype: pyramid.router.Router
    """
    config = Configurator(settings=normalize_settings(settings))
    config.include('pyramid_mako')
    config.include('pyramid_beaker')

    # Database: bind both the scoped session and the declarative metadata.
    engine = engine_from_config(config.registry.settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine

    # External services, each configured from its own INI setting.
    cache_region.configure_from_config(config.registry.settings, 'dogpile.')
    redis_conn.from_url(config.registry.settings['redis.url'])
    celery.config_from_object(configure_celery(config.registry.settings))
    identity.configure_tz(config.registry.settings['app.timezone'])
    akismet.configure_key(config.registry.settings['app.akismet_key'])
    dnsbl.configure_providers(config.registry.settings['app.dnsbl_providers'])
    geoip.configure_geoip2(config.registry.settings['app.geoip2_database'])
    checklist.configure_checklist(config.registry.settings['app.checklist'])
    proxy_detector.configure_from_config(
        config.registry.settings, 'app.proxy_detect.')

    # Request helpers, routes and views.
    config.set_request_property(remote_addr)
    config.set_request_property(route_name)
    config.add_request_method(tagged_static_path)
    config.add_route('robots', '/robots.txt')
    config.include('fanboi2.serializers')
    config.include('fanboi2.views.pages', route_prefix='/pages')
    config.include('fanboi2.views.api', route_prefix='/api')
    config.include('fanboi2.views.boards', route_prefix='/')
    config.add_static_view('static', 'static', cache_max_age=3600)
    config.scan()
    return config.make_wsgi_app()
def setup_db(self) -> Engine:
    """Create the database engine from instance settings and bind the
    project metadata to it."""
    from quiz_bot.db.base import metadata

    options = {
        'url': self.url,
        "pool_recycle": self.pool_recycle,
        "pool_pre_ping": True,
        "pool_size": self.pool_size,
        "poolclass": QueuePool,
        "connect_args": {
            'connect_timeout': self.connection_timeout,
            'application_name': self.application_name,
        },
    }
    db_engine = engine_from_config(options, prefix="")
    metadata.bind = db_engine
    return db_engine  # noqa: R504
def main(global_config, **settings):  # pragma: no cover
    """This function returns a Pyramid WSGI application.

    :param global_config: A :type:`dict` containing global config.
    :param settings: A :type:`dict` containing values from INI.

    :type global_config: dict
    :type settings: dict
    :rtype: pyramid.router.Router
    """
    config = Configurator(settings=normalize_settings(settings))
    config.include('pyramid_mako')
    config.include('pyramid_beaker')

    # Database: bind both the scoped session and the declarative metadata.
    engine = engine_from_config(config.registry.settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine

    # External services, each configured from its own INI setting.
    cache_region.configure_from_config(config.registry.settings, 'dogpile.')
    redis_conn.from_url(config.registry.settings['redis.url'])
    celery.config_from_object(configure_celery(config.registry.settings))
    identity.configure_tz(config.registry.settings['app.timezone'])
    akismet.configure_key(config.registry.settings['app.akismet_key'])
    dnsbl.configure_providers(config.registry.settings['app.dnsbl_providers'])
    proxy_detector.configure_from_config(
        config.registry.settings, 'app.proxy_detect.')

    # Request helpers, routes and views.
    config.set_request_property(remote_addr)
    config.set_request_property(route_name)
    config.add_request_method(tagged_static_path)
    config.add_route('robots', '/robots.txt')
    config.include('fanboi2.serializers')
    config.include('fanboi2.views.pages', route_prefix='/pages')
    config.include('fanboi2.views.api', route_prefix='/api')
    config.include('fanboi2.views.boards', route_prefix='/')
    config.add_static_view('static', 'static', cache_max_age=3600)
    config.scan()
    return config.make_wsgi_app()
def __init__(self):
    """
    Constructor __init__(Connection)

    :since: v1.0.0
    """

    self.local = None
    """ Local data handle """
    self._log_handler = NamedLoader.get_singleton("dNG.data.logging.LogHandler", False)
    """ The LogHandler is called whenever debug messages should be logged or errors happened. """

    # Lazily create the class-shared engine once per process.
    # Double-checked locking: the unlocked outer test skips the lock on
    # the common path; the inner re-test under the lock ensures only one
    # thread ever creates the engine.
    if (Connection._sa_engine is None):
        with Connection._instance_lock:  # Thread safety
            if (Connection._sa_engine is None):
                Connection._sa_engine = engine_from_config(Settings.get_dict(),
                                                           prefix = "pas_database_sqlalchemy_"
                                                          )
class Thing(object):
    """ """
    pass


# much code deleted


def main(args, session):
    """ """
    pass


if __name__ == '__main__':
    # Command-line entry point: parse arguments, build a DB session,
    # then hand off to main().
    import argparse

    cli = argparse.ArgumentParser(description='Bulk loader')
    cli.add_argument('configfile', help='Configuration file')
    cli.add_argument('batch_id', help='batch_id to restart')
    parsed_args = cli.parse_args()

    app_settings = get_appsettings(parsed_args.configfile)
    engine = engine_from_config(app_settings, 'sqlalchemy.')

    # Shut up SQLAlchemy
    logging.basicConfig()
    log.setLevel(logging.DEBUG)
    logging.getLogger('sqlalchemy.engine').setLevel(logging.WARN)

    session_factory = sessionmaker(bind=engine)  # create a Session factory
    main(parsed_args, session_factory())
# encoding:utf-8 ''' Celery 任务配置 ''' from __future__ import absolute_import import logging from celery import Celery from config import CeleryConfig from sqlalchemy.engine import engine_from_config from sqlalchemy.orm import sessionmaker, scoped_session app = Celery('novel') app.config_from_object(CeleryConfig) sa_engine = engine_from_config(app.conf['SQLALCHEMY_OPTIONS'], prefix='') app.DBSession = sessionmaker(bind=sa_engine) logger = logging.getLogger('nobot.celery') logger.setLevel(logging.INFO) formatter = logging.Formatter('[%(asctime)s]-[%(name)s]-[%(levelname)s]: %(message)s') s_handler = logging.StreamHandler() s_handler.setLevel(logging.WARNING) s_handler.setFormatter(formatter) logger.addHandler(s_handler) app.logger = logger
def setup_app(command, conf, vars):
    """Initialize the schema and bind the session for a fresh deployment."""
    db_engine = engine_from_config(conf, 'sqlalchemy.')
    DBSession.configure(bind=db_engine)
    Base.metadata.bind = db_engine
    Base.metadata.create_all(db_engine)
    populate()
def get_engine(settings, prefix='sqlalchemy.'):
    """Return an engine built from the `settings` keys under `prefix`."""
    engine = engine_from_config(settings, prefix=prefix)
    return engine
def create_engine(self):
    """Return a fresh engine for this instance's database URL."""
    options = {'url': self._db_url, 'pool_recycle': 60}
    return engine_from_config(options, prefix='')
def configure_db(config_obj, prefix, db_basename=None):
    """
    Configures the database connection.

    @param config_obj: Object holding the configuration.
    @type config_obj: C{ConfigObj} or C{PylonsConfig}
    @param prefix: Prefix of the configuration keys related
        to the database.
    @type prefix: C{basestring}
    @param db_basename: Prefix of the Vigilo table names.
    @type db_basename: C{basestring}
    @return: The configured Engine, usable by SQLAlchemy.

    @note: The L{db_basename} parameter is NO LONGER used.  Instead,
        the value of the "db_basename" key in config_obj is used
        automatically.
    """
    if db_basename is not None:
        import warnings
        warnings.warn(DeprecationWarning(
            "Passing a third argument to configure_db() is now deprecated."))

    # Determine whether config_obj is a TurboGears (Pylons)
    # configuration or a more standard ConfigObj object.
    # NOTE(review): using_tg is computed but never read below — confirm
    # whether this is dead code or used via side effects elsewhere.
    using_tg = False
    try:
        from config import ConfigObj
    except ImportError:
        using_tg = True
    else:
        using_tg = not isinstance(config_obj, ConfigObj)

    # Model setup.  Must be done first of all:
    # vigilo.models.session depends on this initialization.
    # pylint: disable-msg=W0603
    # W0603: Using the global statement
    global DB_BASENAME, DEFAULT_LANG, HASHING_FUNC
    # Prefix of the table names.
    DB_BASENAME = config_obj.get("db_basename", "")
    # Default language for users.
    DEFAULT_LANG = config_obj.get("lang", None)
    # Password hashing function.
    HASHING_FUNC = config_obj.get("password_hashing_function", None)

    import vigilo.models.session as session

    # If the database connection is already configured,
    # simply return the already-configured object.
    if session.metadata.bind is not None:
        return session.metadata.bind

    # ZTE session.
    # We must go through transaction (a zodb extraction) to commit, rollback.
    # There's also a session context to hold managed data, and the
    # ZopeTransactionExtension makes that mostly transparent.
    # The ZopeTransactionExtension prevents us
    # from committing, etc, the session directly.
    from sqlalchemy.engine import engine_from_config
    engine = engine_from_config(config_obj, prefix=prefix)
    session.DBSession.configure(bind=engine)
    session.metadata.bind = session.DBSession.bind
    return engine
def includeme(config):
    """Bind the declarative base and session factory to a configured engine."""
    sa_engine = engine_from_config(config.registry.settings)
    Session.configure(bind=sa_engine)
    Base.metadata.bind = sa_engine
    Base.prepare(sa_engine)
def _configure_engines(self):
    """Sets up engine bindings based on the given config.

    Given a configuration dictionary, and optionally a key `prefix`,
    this method iterates all its sections looking for sections with
    names that start with `prefix`.  The suffix of the name is the fully
    qualified name of a model.  A SQLAlchemy engine is then set up with
    the section value as configuration parameters to
    `sqlalchemy.engine_from_config`.

    The configured engine bindings are to be found in
    `cherrypy.engine.sqlalchemy.engine_bindings` (or wherever you've
    attached this plugin to).

    If there is a section that is named exactly the same as the
    `prefix`, that section's values are used to configure only one
    SQLAlchemy engine attached to `cherrypy.engine.sqlalchemy.engine`.

    Example::

        # The model to be imported starts after the _ following the prefix
        [sqlalchemy_engine_myproject.models.User]
        url = ...
        pool_recycle = ...

        # If this section exists, only 1 engine will be configured
        [sqlalchemy_engine]
        url = ...

    :py:func: sqlalchemy.engine_from_config
    """
    try:
        from sqlalchemy.engine import engine_from_config
    except ImportError as e:
        # SQLAlchemy is an optional dependency: log guidance and bail out.
        self.bus.log(textwrap.dedent("""
            SQLAlchemy not installed. Please use install it first before proceding:

            $ pip install sqlalchemy
            """))
    else:
        if self.prefix in self.config:
            # Single-engine mode: one section named exactly `prefix`.
            section = self.config[self.prefix]
            self.engine = engine_from_config(section, '')
            self.bus.log("SQLAlchemy engine configured")
        else:
            # Multi-engine mode: one engine per `<prefix>_<model.fqn>` section.
            engine_bindings = {}
            for section_name, section in self.config.viewitems():  # NOTE: Python 2 dict API
                if section_name.startswith(self.prefix):
                    model_fqn = section_name[len(self.prefix) + 1:]
                    model_fqn_parts = model_fqn.rsplit('.', 1)
                    try:
                        model_mod = __import__(model_fqn_parts[0], globals(),
                                               locals(), [model_fqn_parts[1]])
                    except ImportError as e:
                        # level 40 == logging.ERROR
                        self.bus.log(e, level=40)
                    else:
                        model = getattr(model_mod, model_fqn_parts[1])
                        engine_bindings[model] = engine_from_config(section, '')
            self.engine_bindings = engine_bindings
            self.bus.log("SQLAlchemy engines configured")
def setUp(self):
    """Build the test fixture: a DB session, an ingester API client and a
    Project populated with one Method, one Dataset and their Locations.
    """
    # --- configuration & database ------------------------------------
    self.config = ConfigParser.SafeConfigParser()  # NOTE: Python 2 stdlib name
    self.config.read('../../development.ini')
    settings = self.config._sections["app:main"]
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    self.session = DBSession()

    # --- ingester API client -----------------------------------------
    self.auth = CredentialsAuthentication(self.config.get("app:main", "ingesterapi.username"),
                                          self.config.get("app:main", "ingesterapi.password"))
    self.ingester_api = IngesterAPIWrapper(self.config.get("app:main", "ingesterapi.url"), self.auth)

    # --- project under test ------------------------------------------
    self.project = Project()

    test_location = Location()
    test_location.name = "Test Location"
    test_location.location = "POINT(135.8763427287297 -24.167471616893767)"
    test_location.elevation = 12.3

    self.project.information = Metadata()
    self.project.information.locations.append(test_location)
    self.project.information.retention_period = "5"
    self.project.metadata.national_significance = False

    # Method with a custom schema derived from the pre-installed
    # temperature schema (id=1, the default template schema set up on
    # first run within scripts\initialise_database.py).
    method1 = Method()
    method1.method_name = "Artificial tree sensor"
    method1.method_description = "A custom developed sensor consisting of a calibrated temperature sensor and a humidity sensor (which also has an uncalibrated temperature sensor within it)"
    method1.data_source = PullDataSource.__tablename__
    temperature_schema = self.session.query(MethodSchema).filter_by(id=1).first()
    method1.data_type = MethodSchema()
    method1.data_type.name = "Test Schema"
    method1.data_type.parents.append(temperature_schema)

    custom_field = MethodSchemaField()
    custom_field.name = "Distance"
    custom_field.type = "file"
    # NOTE(review): "text/cvs" looks like a typo for "text/csv" — kept
    # as-is since the dataset's mime_type below copies this value.
    custom_field.units = "text/cvs"
    method1.data_type.custom_fields.append(custom_field)
    self.project.methods.append(method1)

    self.session.add(method1)
    self.session.flush()  # flush so method1.id is assigned before use below

    # Dataset pulling data periodically from a local test endpoint.
    dataset1 = Dataset()
    dataset1.method_id = method1.id
    dataset1.disabled = False
    dataset1.description = "Test dataset"

    data_source = PullDataSource()
    data_source.uri = "http://localhost/test_ingestion"
    data_source.mime_type = custom_field.units
    data_source.selected_sampling = PullDataSource.periodic_sampling.key
    data_source.file_field = custom_field.id
    data_source.periodic_sampling = 1
    dataset1.pull_data_source = data_source

    dataset1.time_period_description = "Test dataset time description"
    dataset1.date_from = 1234
    dataset1.date_to = 1234
    dataset1.location_description = "Test dataset location description"
    dataset1.elevation = 12.5

    # If project location is set:
    #   Allow user to provide offset only (set dataset location to project location)
    # Else:
    #   Must set location (with optional offset)
    # TODO: For locations in project: add as region to location
    dataset_location = Location()
    dataset_location.name = "Test Dataset Location"
    dataset_location.location = "POINT(132.8763427287297 -24.167471616893767)"
    dataset_location.elevation = 12.6
    dataset1.dataset_locations.append(dataset_location)

    location_offset = LocationOffset(0, 0, 5)
    dataset1.location_offset = location_offset

    self.project.datasets.append(dataset1)
    self.session.add(self.project)
    self.session.flush()