def test_distill_raw_dict(self):
    """A single mapping (dict or dict-like) distills to a one-element list."""
    for mapping in (
        {"foo": "bar"},
        immutabledict({"foo": "bar"}),
        MappingProxyType({"foo": "bar"}),
    ):
        eq_(self.module._distill_raw_params(mapping), [mapping])
def url_kwargs(self):
    """Return the keyword arguments for this report's URL as an immutabledict.

    Includes ``subreport_slug`` only when one is set.
    """
    params = {"domain": self.domain, "report_slug": self.report_slug}
    if self.subreport_slug:
        params["subreport_slug"] = self.subreport_slug
    return immutabledict(params)
def map_to(self, attrname, tablename=None, selectable=None, schema=None,
           base=None, mapper_args=util.immutabledict()):
    """Map *attrname* on this instance to a reflected table or a selectable.

    :param attrname: attribute name to establish on this instance.
    :param tablename: name of a table to reflect; mutually exclusive
        with ``selectable``.
    :param selectable: a ``FromClause`` to map; mutually exclusive with
        ``tablename``.
    :param schema: schema name, only valid together with ``tablename``.
    :param base: base class for the mapped class; defaults to this
        instance's configured base.
    :param mapper_args: extra keyword arguments passed to the mapper;
        may supply ``'primary_key'`` when the selectable has none.
    :raises AttributeError: if *attrname* is already mapped.
    :raises ArgumentError: on an invalid argument combination.
    :raises NoForeignKeysError: if no primary key can be determined.
    """
    if attrname in self._cache:
        # Fixed typo in error message: "Attibute" -> "Attribute".
        raise AttributeError('Attribute {} is already mapped to {}'.format(
            attrname, class_mapper(self._cache[attrname]).mapped_table))
    if tablename is not None:
        if not isinstance(tablename, str):
            raise ArgumentError(
                'Tablename must be a string, type found: {}'.format(
                    type(tablename)))
        if selectable is not None:
            raise ArgumentError(
                '`tablename` and `selectable` are mutually exclusive')
        # Reflect the named table from the database on demand.
        selectable = Table(tablename,
                           self.metadata,
                           autoload=True,
                           autoload_with=self.bind,
                           schema=schema or self.schema)
    elif schema:
        raise ArgumentError(
            '`tablename` argument is required when using schema.')
    elif selectable is not None:
        if not isinstance(selectable, expression.FromClause):
            # Fixed message: added missing space between the concatenated
            # string literals and corrected "joing" -> "join".
            raise ArgumentError('`selectable` argument must be a table, '
                                'select, join, or other construct.')
    else:
        raise ArgumentError(
            '`tablename` or `selectable` argument is required.')
    # A primary key must come from the selectable itself or be supplied
    # explicitly via mapper_args['primary_key'].
    if not selectable.primary_key.columns and \
            'primary_key' not in mapper_args:
        if tablename:
            raise NoForeignKeysError(
                'table {} does not have a primary key defined'.format(
                    tablename))
        else:
            raise NoForeignKeysError(
                'Selectable {} does not have a primary key defined.'.
                format(selectable))
    mapped_cls = class_for_table(self.sess, self.engine, selectable,
                                 base or self.base, mapper_args)
    self._cache[attrname] = mapped_cls
    return mapped_cls
def url_kwargs(self):
    """Build the URL kwargs for this report, frozen as an immutabledict."""
    result = dict(domain=self.domain, report_slug=self.report_slug)
    if self.subreport_slug:
        result['subreport_slug'] = self.subreport_slug
    return immutabledict(result)
def __init__(self, dialect, statement, column_keys=None, inline=False,
             **kwargs):
    """Initialize the compiler.

    When compiling a bare ``Column``, suppress the table qualifier so
    only the column name is rendered.
    """
    if isinstance(statement, Column):
        compile_kwargs = util.immutabledict({"include_table": False})
        kwargs["compile_kwargs"] = compile_kwargs
    super(BigQueryCompiler, self).__init__(dialect, statement,
                                           column_keys, inline, **kwargs)
def recipients_by_language(self):
    """Group all recipient emails by the recipient's preferred language.

    Recipients without a stored language fall back to the owner's
    language (or 'en' when the owner has none). Returns an
    immutabledict mapping language code -> list of emails.
    """
    docs = get_user_docs_by_username(self.all_recipient_emails)
    user_languages = {
        doc['username']: doc['language']
        for doc in docs
        if 'username' in doc and 'language' in doc
    }
    default_language = user_languages.get(self.owner_email, 'en')
    grouped = defaultdict(list)
    for address in self.all_recipient_emails:
        grouped[user_languages.get(address, default_language)].append(address)
    return immutabledict(grouped)
def __init__(self, dialect, statement, bind=None,
             schema_translate_map=None,
             compile_kwargs=util.immutabledict()):
    """Set up the DDL compiler with an Athena-specific identifier preparer."""
    self._preparer = AthenaDDLIdentifierPreparer(dialect)
    super(AthenaDDLCompiler, self).__init__(
        dialect=dialect,
        statement=statement,
        bind=bind,
        schema_translate_map=schema_translate_map,
        compile_kwargs=compile_kwargs,
    )
def map_to(self, attrname, tablename=None, selectable=None, schema=None,
           base=None, mapper_args=util.immutabledict()):
    """Reflect *tablename* and map it, synthesizing a primary key.

    UCSC tables often lack a declared primary key, so a pseudo key is
    assembled from well-known column names plus any primary/unique
    columns before delegating to ``SQLSoup.map_to``.
    """
    tbl = Table(tablename, self._metadata, autoload=True,
                autoload_with=self.bind, schema=schema or self.schema)
    # make a fake primary key
    pids = [x for x in tbl.columns
            if x.name in ('chrom', 'chromStart', 'name', 'txStart', 'kgID')
            or x.primary_key or x.unique]
    # FIX: `__import__(..., level=-1)` is Python 2 only (a negative level
    # raises ValueError on Python 3); import_module works on both and is
    # what the newer variant of this method already uses.
    from importlib import import_module
    models = import_module("cruzdb.models")
    try:
        # Prefer a model class named after the table, if one exists.
        base = getattr(models, tablename)
    except AttributeError:
        base = models.Feature
    # Copy: mapper_args may be an immutabledict default.
    mapper_args = dict(mapper_args)
    mapper_args['primary_key'] = pids
    return sqlsoup.SQLSoup.map_to(self, attrname, tablename, selectable,
                                  schema, base=base, mapper_args=mapper_args)
def on_new_db(self):
    """Prompt for a new database file, create it, and migrate it to head."""
    settings = get_settings()
    last_dir = settings.value('db_path')
    file_name, _ = QFileDialog.getSaveFileName(
        self,
        "Create a new database",
        dir=last_dir,
        filter='Player databases (*.zpl)',
        options=QFileDialog.DontUseNativeDialog)
    if not file_name:
        # User cancelled the save dialog.
        return
    database_path = Path(file_name).absolute()
    settings.setValue('db_path', str(database_path))
    migrations_dir = str(Path(__file__).parent / 'db')
    alembic_config = Config(
        config_args=immutabledict({
            'script_location': migrations_dir,
            'sqlalchemy.url': get_database_url(database_path)
        }))
    command.upgrade(alembic_config, 'head')
def map_to(self, attrname, tablename=None, selectable=None, schema=None,
           base=None, mapper_args=util.immutabledict()):
    """Reflect *tablename* and map it with a synthesized primary key."""
    reflected = Table(tablename, self._metadata, autoload=True,
                      autoload_with=self.bind, schema=schema or self.schema)
    # make a fake primary key
    key_names = ('chrom', 'chromStart', 'name', 'txStart', 'kgID')
    pids = [col for col in reflected.columns
            if col.name in key_names or col.primary_key or col.unique]
    if not pids:
        # Fall back to any column whose name merely resembles one of the
        # usual positional keys.
        fragments = 'chrom start name'.split()
        pids = [col for col in reflected.columns
                if any(f in col.name.lower() for f in fragments)]
    models = import_module("cruzdb.models")
    try:
        # Prefer a model class named after the table, if one exists.
        base = getattr(models, tablename)
    except AttributeError:
        base = models.Feature
    mapper_args = dict(mapper_args)
    mapper_args['primary_key'] = pids
    return sqlsoup.SQLSoup.map_to(self, attrname, tablename, selectable,
                                  schema, base=base, mapper_args=mapper_args)
def test_serialize(self):
    """immutabledict round-trips through every available pickler."""
    d = util.immutabledict({1: 2, 3: 4})
    for loads, dumps in picklers():
        # FIX: the Python 2 `print` statement is a syntax error on
        # Python 3, and printing asserted nothing — check the
        # round-tripped mapping equals the original instead.
        assert loads(dumps(d)) == d
# Example wiring for sqlalchemy-postgresql-audit: metadata with naming
# conventions (including the audit-table convention) bound to an engine
# created with the "audit" plugin.
from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.engine.url import URL
from sqlalchemy.util import immutabledict

from sqlalchemy_postgresql_audit import install_audit_triggers

# Constraint naming conventions; the "audit.table" entry names the
# generated audit tables (here: "<table>_audr").
NAMING_CONVENTIONS = immutabledict({
    "ix": "ix_%(column_0_label)s",
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(constraint_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s",
    "audit.table": "%(table_name)s_audr",
})

meta = MetaData(naming_convention=NAMING_CONVENTIONS)

# NOTE(review): credentials are placeholders ("******") — fill in real
# values or load them from the environment before running.
url = URL(
    drivername="postgresql+psycopg2",
    host="localhost",
    port=5432,
    password="******",
    username="******",
)

# The "audit" engine plugin hooks engine creation for auditing.
engine = create_engine(url, plugins=["audit"])
engine.echo = True
meta.bind = engine

t = Table("foo", meta, Column("bar", String),
          # (table definition continues beyond this chunk)
def history_mapper(local_mapper):
    """Create and attach a history (versioning) mapper for *local_mapper*.

    Builds a ``<Class>History`` companion class mapped to a parallel
    ``*_history`` table (excluding hidden/untracked columns, plus
    ``version``, ``changed`` and ``change_info`` bookkeeping columns),
    stores it on the original class as ``__history_mapper__``, and — when
    the class is the inheritance root — adds a ``version`` column and
    property to the original table.
    """
    cls = local_mapper.class_

    # Force active_history so prior values are loaded before a flush.
    for prop in local_mapper._props:
        local_mapper._props[prop].active_history = True

    super_mapper = local_mapper.inherits
    super_history_mapper = getattr(cls, "__history_mapper__", None)

    polymorphic_on = None
    super_fks = []

    def _col_copy(col):
        # Copy a column for the history table, stripping uniqueness and
        # defaults: history rows record what *was*, not fresh values.
        copy = col.copy()
        col.info["history_copy"] = copy
        copy.unique = False
        copy.default = None
        copy.server_default = None
        return copy

    # we don't create copies of these columns on the version table b/c we
    # don't save them anyways
    untracked_cols = set(getattr(cls, "__chrononaut_untracked__", []))
    hidden_cols = set(getattr(cls, "__chrononaut_hidden__", []))
    noindex_cols = set(getattr(cls, "__chrononaut_disable_indices__", []))

    properties = util.OrderedDict()
    if not super_mapper or \
            local_mapper.local_table is not super_mapper.local_table:
        cols = []
        # add column.info to identify columns specific to versioning
        version_meta = {"version_meta": True}
        for column in local_mapper.local_table.c:
            if ("version_meta" in column.info
                    or column.key in hidden_cols  # noqa
                    or column.key in untracked_cols  # noqa
                    ):
                continue

            col = _col_copy(column)

            # disable user-specified column indices on history tables,
            # if indicated
            if col.index is True and column.key in noindex_cols:
                col.index = None

            if super_mapper and \
                    col_references_table(column, super_mapper.local_table):
                super_fks.append(
                    (col.key,
                     list(super_history_mapper.local_table.primary_key)[0]))
            cols.append(col)

            if column is local_mapper.polymorphic_on:
                polymorphic_on = col

            orig_prop = local_mapper.get_property_by_column(column)
            # carry over column re-mappings
            if len(orig_prop.columns) > 1 \
                    or orig_prop.columns[0].key != orig_prop.key:
                properties[orig_prop.key] = tuple(
                    col.info["history_copy"] for col in orig_prop.columns)

        if super_mapper:
            super_fks.append(
                ("version", super_history_mapper.local_table.c.version))

        # "version" stores the integer version id. This column is required.
        cols.append(
            Column("version",
                   Integer,
                   primary_key=True,
                   autoincrement=False,
                   info=version_meta))

        # "changed" column stores the UTC timestamp of when the history
        # row was created. This column is optional and can be omitted.
        cols.append(
            Column(
                "changed",
                DateTime(timezone=True),
                default=lambda: datetime.now(pytz.utc),
                info=version_meta,
            ))

        # Append some JSON metadata about the change too
        cols.append(
            Column("change_info",
                   postgresql.JSONB,
                   default=None,
                   info=version_meta))

        if super_fks:
            cols.append(ForeignKeyConstraint(*zip(*super_fks)))

        history_tablename = getattr(
            cls, "__chrononaut_tablename__",
            local_mapper.local_table.name + "_history")
        table = Table(history_tablename,
                      local_mapper.local_table.metadata,
                      *cols,
                      schema=local_mapper.local_table.schema)
    else:
        # single table inheritance. take any additional columns that may
        # have been added and add them to the history table.
        for column in local_mapper.local_table.c:
            if column.key not in super_history_mapper.local_table.c:
                col = _col_copy(column)
                super_history_mapper.local_table.append_column(col)
        table = None

    if super_history_mapper:
        bases = (super_history_mapper.class_, )
        if table is not None:
            properties["changed"] = (table.c.changed, ) + tuple(
                super_history_mapper._props["changed"].columns)
    else:
        bases = local_mapper.base_mapper.class_.__bases__

    versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})

    # Finally add @property's raising OmittedAttributeErrors for missing cols
    for col_name in untracked_cols:
        msg = "{} is explicitly untracked via __chrononaut_untracked__.".format(
            col_name)
        # FIX: bind msg via a default argument — a plain closure late-binds,
        # so every untracked property would raise the *last* column's message.
        setattr(versioned_cls, col_name,
                property(lambda _, msg=msg: raise_(
                    UntrackedAttributeError(msg))))
    for col_name in hidden_cols:
        msg = "{} is explicitly hidden via __chrononaut_hidden__".format(
            col_name)
        # FIX: same late-binding fix as above.
        setattr(versioned_cls, col_name,
                property(lambda _, msg=msg: raise_(
                    HiddenAttributeError(msg))))

    m = mapper(
        versioned_cls,
        table,
        inherits=super_history_mapper,
        polymorphic_on=polymorphic_on,
        polymorphic_identity=local_mapper.polymorphic_identity,
        properties=properties,
    )

    # strip validators from history tables unless explicitly told not to
    if getattr(cls, "__chrononaut_copy_validators__", False):
        m.validators = local_mapper.validators
    else:
        m.validators = util.immutabledict()
    cls.__history_mapper__ = m

    if not super_history_mapper:
        local_mapper.local_table.append_column(
            Column("version", Integer, default=0, nullable=True))
        local_mapper.add_property("version",
                                  local_mapper.local_table.c.version)
def map_to(self, attrname, tablename=None, selectable=None, schema=None,
           base=None, mapper_args=util.immutabledict()):
    """Configure a mapping to the given attrname.

    This is the "master" method that can be used to create any
    configuration.

    :param attrname: String attribute name which will be established
     as an attribute on this :class:.`.SQLSoup` instance.
    :param base: a Python class which will be used as the
     base for the mapped class. If ``None``, the "base" argument
     specified by this :class:`.SQLSoup` instance's constructor will be
     used, which defaults to ``object``.
    :param mapper_args: Dictionary of arguments which will
     be passed directly to :func:`.orm.mapper`. May include a
     ``'primary_key'`` entry when the selectable has no primary key.
    :param tablename: String name of a :class:`.Table` to be
     reflected. If a :class:`.Table` is already available,
     use the ``selectable`` argument. This argument is mutually
     exclusive versus the ``selectable`` argument.
    :param selectable: a :class:`.Table`, :class:`.Join`, or
     :class:`.Select` object which will be mapped. This
     argument is mutually exclusive versus the ``tablename``
     argument.
    :param schema: String schema name to use if the
     ``tablename`` argument is present.

    """
    if attrname in self._cache:
        raise SQLSoupError(
            "Attribute '%s' is already mapped to '%s'" % (
                attrname,
                class_mapper(self._cache[attrname]).mapped_table))

    if tablename is not None:
        # NOTE(review): ``basestring`` is Python 2 only — confirm this
        # module still targets Python 2 before porting.
        if not isinstance(tablename, basestring):
            raise ArgumentError("'tablename' argument must be a string.")
        if selectable is not None:
            raise ArgumentError("'tablename' and 'selectable' "
                                "arguments are mutually exclusive")

        # Reflect the named table from the database on demand.
        selectable = Table(tablename,
                           self._metadata,
                           autoload=True,
                           autoload_with=self.bind,
                           schema=schema or self.schema)
    elif schema:
        raise ArgumentError("'tablename' argument is required when "
                            "using 'schema'.")
    elif selectable is not None:
        if not isinstance(selectable, expression.FromClause):
            raise ArgumentError("'selectable' argument must be a "
                                "table, select, join, or other "
                                "selectable construct.")
    else:
        raise ArgumentError("'tablename' or 'selectable' argument is "
                            "required.")

    # A primary key must come from the selectable itself or be supplied
    # explicitly via mapper_args['primary_key'].
    if not selectable.primary_key.columns and not \
            'primary_key' in mapper_args:
        if tablename:
            raise SQLSoupError("table '%s' does not have a primary "
                               "key defined" % tablename)
        else:
            raise SQLSoupError("selectable '%s' does not have a primary "
                               "key defined" % selectable)

    mapped_cls = _class_for_table(
        self.session,
        self.engine,
        selectable,
        base or self.base,
        mapper_args)
    self._cache[attrname] = mapped_cls
    return mapped_cls
def map_to(self, attrname, tablename=None, selectable=None, schema=None,
           base=None, mapper_args=util.immutabledict()):
    """Configure a mapping to the given attrname.

    This is the "master" method that can be used to create any
    configuration.

    .. versionadded:: 0.6.6

    :param attrname: String attribute name which will be established
     as an attribute on this :class:.`.SqlSoup` instance.
    :param base: a Python class which will be used as the
     base for the mapped class. If ``None``, the "base" argument
     specified by this :class:`.SqlSoup` instance's constructor will be
     used, which defaults to ``object``.
    :param mapper_args: Dictionary of arguments which will
     be passed directly to :func:`.orm.mapper`.
    :param tablename: String name of a :class:`.Table` to be
     reflected. If a :class:`.Table` is already available,
     use the ``selectable`` argument. This argument is mutually
     exclusive versus the ``selectable`` argument.
    :param selectable: a :class:`.Table`, :class:`.Join`, or
     :class:`.Select` object which will be mapped. This
     argument is mutually exclusive versus the ``tablename``
     argument.
    :param schema: String schema name to use if the
     ``tablename`` argument is present.

    """
    if attrname in self._cache:
        raise InvalidRequestError(
            "Attribute '%s' is already mapped to '%s'" % (
                attrname,
                class_mapper(self._cache[attrname]).mapped_table
            ))

    if tablename is not None:
        # NOTE(review): ``basestring`` is Python 2 only.
        if not isinstance(tablename, basestring):
            raise ArgumentError("'tablename' argument must be a string."
                                )
        if selectable is not None:
            raise ArgumentError("'tablename' and 'selectable' "
                                "arguments are mutually exclusive")

        # Reflect the named table from the database on demand.
        selectable = Table(tablename,
                           self._metadata,
                           autoload=True,
                           autoload_with=self.bind,
                           schema=schema or self.schema)
    elif schema:
        raise ArgumentError("'tablename' argument is required when "
                            "using 'schema'.")
    elif selectable is not None:
        if not isinstance(selectable, expression.FromClause):
            raise ArgumentError("'selectable' argument must be a "
                                "table, select, join, or other "
                                "selectable construct.")
    else:
        raise ArgumentError("'tablename' or 'selectable' argument is "
                            "required.")

    # Unlike newer variants of this method, there is no
    # mapper_args['primary_key'] escape hatch here: the selectable itself
    # must carry a primary key.
    if not selectable.primary_key.columns:
        if tablename:
            raise PKNotFoundError(
                "table '%s' does not have a primary "
                "key defined" % tablename)
        else:
            raise PKNotFoundError(
                "selectable '%s' does not have a primary "
                "key defined" % selectable)

    mapped_cls = _class_for_table(
        self.session,
        self.engine,
        selectable,
        base or self.base,
        mapper_args
    )
    self._cache[attrname] = mapped_cls
    return mapped_cls
# MA 02111-1307, USA. # # In applying this license, CERN does not # waive the privileges and immunities granted to it by virtue of its status # as an Intergovernmental Organization or submit itself to any jurisdiction. """Shared database object for Invenio.""" from flask_sqlalchemy import SQLAlchemy as FlaskSQLAlchemy from sqlalchemy import MetaData, event, util from sqlalchemy.engine import Engine from werkzeug.local import LocalProxy NAMING_CONVENTION = util.immutabledict({ 'ix': 'ix_%(column_0_label)s', 'uq': 'uq_%(table_name)s_%(column_0_name)s', 'ck': 'ck_%(table_name)s_%(constraint_name)s', 'fk': 'fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s', 'pk': 'pk_%(table_name)s', }) """Configuration for constraint naming conventions.""" metadata = MetaData(naming_convention=NAMING_CONVENTION) """Default database metadata object holding associated schema constructs.""" class SQLAlchemy(FlaskSQLAlchemy): """Implement or overide extension methods.""" def apply_driver_hacks(self, app, info, options): """Called before engine creation.""" # Don't forget to apply hacks defined on parent object. super(SQLAlchemy, self).apply_driver_hacks(app, info, options)
def test_serialize(self):
    """immutabledict survives pickling with every available pickler."""
    d = util.immutabledict({1: 2, 3: 4})
    for loads, dumps in picklers():
        # FIX: the Python 2 `print` statement fails to parse on
        # Python 3 and verified nothing; assert the round trip instead.
        assert loads(dumps(d)) == d
def first_connect(dbapi_connection, connection_record):
    """Run one-time dialect initialization on the pool's first connection."""
    conn = base.Connection(
        engine, connection=dbapi_connection, _has_events=False)
    # Initialize against empty execution options.
    conn._execution_options = util.immutabledict()
    dialect.initialize(conn)