def post_create_table(self, table):
    """Build the Redshift-specific clauses (DISTSTYLE, DISTKEY, SORTKEY /
    INTERLEAVED SORTKEY) appended after CREATE TABLE.

    :param table: the Table being created; options are read from
        ``table.dialect_options['redshift']``.
    :returns: the DDL suffix string (possibly empty).
    :raises exc.CompileError: if ``diststyle`` is not EVEN, KEY or ALL.
    :raises exc.ArgumentError: if both ``sortkey`` and
        ``interleaved_sortkey`` are specified.
    """
    text = ""
    info = table.dialect_options['redshift']

    diststyle = info.get('diststyle')
    if diststyle:
        diststyle = diststyle.upper()
        if diststyle not in ('EVEN', 'KEY', 'ALL'):
            raise exc.CompileError(
                u"diststyle {0} is invalid".format(diststyle))
        text += " DISTSTYLE " + diststyle

    distkey = info.get('distkey')
    if distkey:
        text += " DISTKEY ({0})".format(distkey)

    sortkey = info.get('sortkey')
    interleaved_sortkey = info.get('interleaved_sortkey')
    if sortkey and interleaved_sortkey:
        raise exc.ArgumentError(
            "Parameters sortkey and interleaved_sortkey are "
            "mutually exclusive; you may not specify both.")
    if sortkey or interleaved_sortkey:
        keys = sortkey or interleaved_sortkey
        # BUG FIX: previously only a *sortkey* string was wrapped in a
        # list; a bare string passed as *interleaved_sortkey* was iterated
        # character-by-character. Wrap any single string key here.
        if isinstance(keys, string_types):
            keys = [keys]
        keys = [key.name if isinstance(key, Column) else key
                for key in keys]
        if interleaved_sortkey:
            text += " INTERLEAVED"
        text += " SORTKEY ({0})".format(", ".join(keys))
    return text
def __new__(cls, arg):
    """Parse a comma-separated cascade specification into a CascadeOptions
    frozenset and precompute a boolean flag per cascade behavior.

    :param arg: comma-separated cascade names; None or "" means empty.
    :raises sa_exc.ArgumentError: for names outside ``_allowed_cascades``.
    """
    # BUG FIX: use a raw string for the regex — '\s' in a plain literal is
    # an invalid escape sequence (DeprecationWarning today, an error in
    # future Python versions).
    values = set([c for c in re.split(r'\s*,\s*', arg or "") if c])
    if values.difference(cls._allowed_cascades):
        raise sa_exc.ArgumentError(
            "Invalid cascade option(s): %s" %
            ", ".join([repr(x) for x in
                       sorted(values.difference(cls._allowed_cascades))]))
    if "all" in values:
        # "all" expands into the full set of with-all cascades.
        values.update(cls._add_w_all_cascades)
    if "none" in values:
        values.clear()
    values.discard('all')
    self = frozenset.__new__(CascadeOptions, values)
    # Precomputed flags so callers can test e.g. `cascade.delete` cheaply.
    self.save_update = 'save-update' in values
    self.delete = 'delete' in values
    self.refresh_expire = 'refresh-expire' in values
    self.merge = 'merge' in values
    self.expunge = 'expunge' in values
    self.delete_orphan = "delete-orphan" in values
    if self.delete_orphan and not self.delete:
        util.warn("The 'delete-orphan' cascade "
                  "option requires 'delete'.")
    return self
def _new(self, lazy_collection):
    """Create the association-proxy wrapper matching the duck type of the
    underlying collection.

    :param lazy_collection: zero-arg callable returning the raw collection.
    :returns: a ``(collection_class, proxy)`` tuple.
    :raises exc.ArgumentError: if the collection's interface cannot be
        guessed (not list/dict/set-like).
    """
    # Fall back to the target class when no explicit creator was given.
    creator = self.parent.creator if self.parent.creator else \
        self.target_class
    # Sniff whether the backing collection behaves like a list/dict/set.
    collection_class = util.duck_type_collection(lazy_collection())
    if self.parent.proxy_factory:
        # A user-supplied factory takes precedence over the built-ins.
        return collection_class, self.parent.proxy_factory(
            lazy_collection, creator, self.value_attr, self)
    if self.parent.getset_factory:
        getter, setter = self.parent.getset_factory(collection_class, self)
    else:
        getter, setter = self.parent._default_getset(collection_class)
    if collection_class is list:
        return collection_class, AugAssociationList(
            lazy_collection, creator, getter, setter, self)
    elif collection_class is dict:
        return collection_class, AugAssociationDict(
            lazy_collection, creator, getter, setter, self)
    elif collection_class is set:
        return collection_class, AugAssociationSet(
            lazy_collection, creator, getter, setter, self)
    else:
        # NOTE(review): the message reads self.collection_class, but this
        # method only computed the local collection_class — presumably the
        # attribute is assigned elsewhere; confirm it exists on this path.
        raise exc.ArgumentError(
            'could not guess which interface to use for '
            'collection_class "%s" backing "%s"; specify a '
            'proxy_factory and proxy_bulk_set manually' %
            (self.collection_class.__name__, self.target_collection))
def aliased(element, alias=None, name=None, adapt_on_names=False):
    """Return an alias for *element*: a plain SQL alias for FromClause
    objects, an AliasedClass for ORM entities.

    :raises sa_exc.ArgumentError: if ``adapt_on_names`` is used with a
        non-ORM (FromClause) element.
    """
    # Non-FromClause elements are ORM entities: hand off wholesale.
    if not isinstance(element, expression.FromClause):
        return AliasedClass(element, alias=alias, name=name,
                            adapt_on_names=adapt_on_names)
    if adapt_on_names:
        raise sa_exc.ArgumentError(
            "adapt_on_names only applies to ORM elements")
    return element.alias(name)
def criterion_as_pairs(expression, consider_as_foreign_keys=None,
                       consider_as_referenced_keys=None, any_operator=False):
    """traverse an expression and locate binary criterion pairs.

    Returns a list of (referenced, foreign) column tuples collected from
    binary comparisons found in *expression*.
    """
    if consider_as_foreign_keys and consider_as_referenced_keys:
        raise exc.ArgumentError("Can only specify one of 'consider_as_foreign_keys' or 'consider_as_referenced_keys'")

    def visit_binary(binary):
        # Only equality comparisons count unless any_operator=True.
        if not any_operator and binary.operator is not operators.eq:
            return
        # Both sides must be column expressions.
        if not isinstance(binary.left, sql.ColumnElement) or \
                not isinstance(binary.right, sql.ColumnElement):
            return
        if consider_as_foreign_keys:
            # The side present in the candidate set is treated as the
            # foreign-key column; identity check allows self-comparisons.
            if binary.left in consider_as_foreign_keys and \
                    (binary.right is binary.left or
                     binary.right not in consider_as_foreign_keys):
                pairs.append((binary.right, binary.left))
            elif binary.right in consider_as_foreign_keys and \
                    (binary.left is binary.right or
                     binary.left not in consider_as_foreign_keys):
                pairs.append((binary.left, binary.right))
        elif consider_as_referenced_keys:
            # Mirror image: the side present in the set is the referenced
            # column.
            if binary.left in consider_as_referenced_keys and \
                    (binary.right is binary.left or
                     binary.right not in consider_as_referenced_keys):
                pairs.append((binary.left, binary.right))
            elif binary.right in consider_as_referenced_keys and \
                    (binary.left is binary.right or
                     binary.left not in consider_as_referenced_keys):
                pairs.append((binary.right, binary.left))
        else:
            # No candidate sets: fall back to declared ForeignKey metadata
            # on actual schema Columns.
            if isinstance(binary.left, schema.Column) and \
                    isinstance(binary.right, schema.Column):
                if binary.left.references(binary.right):
                    pairs.append((binary.right, binary.left))
                elif binary.right.references(binary.left):
                    pairs.append((binary.left, binary.right))
    pairs = []  # bound before traversal; visit_binary closes over it
    visitors.traverse(expression, {}, {'binary': visit_binary})
    return pairs
def __init__(self, prop):
    """Capture dependency-processing state from a relationship property.

    :raises sa_exc.ArgumentError: if the relationship has no
        synchronize_pairs (nothing to populate between parent and child).
    """
    self.prop = prop
    self.key = prop.key
    self.cascade = prop.cascade
    self.mapper = prop.mapper
    self.parent = prop.parent
    self.secondary = prop.secondary
    self.direction = prop.direction
    self.post_update = prop.post_update
    self.passive_deletes = prop.passive_deletes
    self.passive_updates = prop.passive_updates
    self.enable_typechecks = prop.enable_typechecks
    # Passive flags decide how aggressively related state is loaded
    # during delete/update processing.
    self._passive_delete_flag = (attributes.PASSIVE_NO_INITIALIZE
                                 if self.passive_deletes
                                 else attributes.PASSIVE_OFF)
    self._passive_update_flag = (attributes.PASSIVE_NO_INITIALIZE
                                 if self.passive_updates
                                 else attributes.PASSIVE_OFF)
    if not self.prop.synchronize_pairs:
        raise sa_exc.ArgumentError(
            "Can't build a DependencyProcessor for relationship %s. "
            "No target attributes to populate between parent and "
            "child are present" % self.prop)
def visit_extract(self, extract, **kw):
    """Render EXTRACT for SQLite as a STRFTIME call cast to INTEGER.

    :raises exc.ArgumentError: if the extract field is not supported.
    """
    # BUG FIX: the try previously wrapped self.process() as well, so a
    # KeyError raised while compiling the inner expression was silently
    # reported as an invalid extract field. Narrow the lookup instead.
    try:
        fmt = self.extract_map[extract.field]
    except KeyError:
        raise exc.ArgumentError(
            "%s is not a valid extract argument." % extract.field)
    return "CAST(STRFTIME('%s', %s) AS INTEGER)" % (
        fmt, self.process(extract.expr, **kw))
def __init__(self, convert_unicode=False, assert_unicode=False,
             encoding='utf-8', paramstyle=None, dbapi=None,
             label_length=None, **kwargs):
    """Set up base dialect state: unicode flags, paramstyle resolution,
    identifier preparer and description encoding.

    :raises exc.ArgumentError: if *label_length* exceeds the dialect's
        maximum identifier length.
    """
    self.convert_unicode = convert_unicode
    self.assert_unicode = assert_unicode
    self.encoding = encoding
    self._ischema = None
    self.dbapi = dbapi
    # Paramstyle resolution order: explicit arg, DBAPI default, class default.
    if paramstyle is not None:
        self.paramstyle = paramstyle
    else:
        self.paramstyle = (self.dbapi.paramstyle if self.dbapi is not None
                           else self.default_paramstyle)
    self.positional = self.paramstyle in ('qmark', 'format', 'numeric')
    self.identifier_preparer = self.preparer(self)
    if label_length and label_length > self.max_identifier_length:
        raise exc.ArgumentError(
            "Label length of %d is greater than this dialect's maximum identifier length of %d" %
            (label_length, self.max_identifier_length))
    self.label_length = label_length
    # Respect a class-level description_encoding if one was declared.
    self.description_encoding = getattr(self, 'description_encoding',
                                        encoding)
def create_connect_args(self, url):
    """Translate a SQLAlchemy URL into (args, kwargs) for the Snowflake
    connector.

    A database component of the form ``db/schema`` is split into separate
    database and schema options; a bare account host is expanded to the
    full ``.snowflakecomputing.com`` hostname.

    :raises sa_exc.ArgumentError: if the database component has more than
        two path segments.
    """
    opts = url.translate_connect_args(username='******')

    if 'database' in opts:
        name_spaces = [unquote_plus(e) for e in opts['database'].split('/')]
        if len(name_spaces) == 1:
            pass
        elif len(name_spaces) == 2:
            opts['database'], opts['schema'] = name_spaces
        else:
            raise sa_exc.ArgumentError(
                "Invalid name space is specified: {0}".format(
                    opts['database']))

    if ('.snowflakecomputing.com' not in opts['host']
            and not opts.get('port')):
        # Short-form host is really the account name.
        opts['account'] = opts['host']
        if u'.' in opts['account']:
            # remove region subdomain
            opts['account'] = opts['account'][0:opts['account'].find(u'.')]
            # remove external ID
            opts['account'] = opts['account'].split('-')[0]
        opts['host'] = opts['host'] + '.snowflakecomputing.com'
        opts['port'] = '443'

    opts['autocommit'] = False  # autocommit is disabled by default
    opts.update(url.query)
    self._cache_column_metadata = (
        opts.get('cache_column_metadata', "false").lower() == 'true')
    return ([], opts)
def _listen(cls, target, identifier, fn, retval=False):
    """Register an engine-level event listener.

    When ``retval`` is False, the two statement-rewriting events are
    wrapped so the listener implicitly returns its inputs unchanged.

    :raises exc.ArgumentError: if retval=True is used with an event that
        does not support return values.
    """
    target._has_events = True

    if not retval:
        if identifier == 'before_execute':
            orig_fn = fn

            def wrap(conn, clauseelement, multiparams, params):
                orig_fn(conn, clauseelement, multiparams, params)
                return clauseelement, multiparams, params

            fn = wrap
        elif identifier == 'before_cursor_execute':
            orig_fn = fn

            def wrap(conn, cursor, statement, parameters, context,
                     executemany):
                orig_fn(conn, cursor, statement, parameters, context,
                        executemany)
                return statement, parameters

            fn = wrap
    elif identifier not in ('before_execute', 'before_cursor_execute'):
        # retval is necessarily True in this branch.
        raise exc.ArgumentError("Only the 'before_execute' and "
                                "'before_cursor_execute' engine "
                                "event listeners accept the 'retval=True' "
                                "argument.")
    event.Events._listen(target, identifier, fn)
def format_type(self, type_, use_schema=True):
    """Return the quoted, optionally schema-qualified, name of a
    Postgresql ENUM type.

    :raises exc.ArgumentError: if the type has no name.
    """
    if not type_.name:
        raise exc.ArgumentError("Postgresql ENUM type requires a name.")
    name = self.quote(type_.name, type_.quote)
    if use_schema and type_.schema is not None and not self.omit_schema:
        return self.quote_schema(type_.schema, type_.quote) + "." + name
    return name
def __init__(self, convert_unicode=False, assert_unicode=False,
             encoding='utf-8', paramstyle=None, dbapi=None,
             implicit_returning=None, label_length=None, **kwargs):
    """Construct default dialect state: unicode handling, paramstyle
    resolution, identifier preparer, type compiler and codec helpers.

    :param paramstyle: explicit DBAPI paramstyle; falls back to the
        DBAPI's own value, then to ``self.default_paramstyle``.
    :param label_length: maximum rendered label length; must not exceed
        ``self.max_identifier_length``.
    :raises exc.ArgumentError: if *label_length* exceeds the dialect's
        maximum identifier length.
    """
    if not getattr(self, 'ported_sqla_06', True):
        util.warn(
            "The %s dialect is not yet ported to SQLAlchemy 0.6/0.7" %
            self.name)
    self.convert_unicode = convert_unicode
    if assert_unicode:
        util.warn_deprecated(
            "assert_unicode is deprecated. "
            "SQLAlchemy emits a warning in all cases where it "
            "would otherwise like to encode a Python unicode object "
            "into a specific encoding but a plain bytestring is "
            "received. "
            "This does *not* apply to DBAPIs that coerce Unicode "
            "natively.")
    self.encoding = encoding
    self.positional = False
    self._ischema = None
    self.dbapi = dbapi
    # Paramstyle resolution order: explicit arg, DBAPI default,
    # class-level default.
    if paramstyle is not None:
        self.paramstyle = paramstyle
    elif self.dbapi is not None:
        self.paramstyle = self.dbapi.paramstyle
    else:
        self.paramstyle = self.default_paramstyle
    if implicit_returning is not None:
        self.implicit_returning = implicit_returning
    self.positional = self.paramstyle in ('qmark', 'format', 'numeric')
    self.identifier_preparer = self.preparer(self)
    # Instantiate the class-level type_compiler; this rebinds the
    # attribute to an instance on self (order-sensitive: must come after
    # preparer setup per original sequence).
    self.type_compiler = self.type_compiler(self)
    if label_length and label_length > self.max_identifier_length:
        raise exc.ArgumentError(
            "Label length of %d is greater than this dialect's"
            " maximum identifier length of %d" %
            (label_length, self.max_identifier_length))
    self.label_length = label_length
    # Cursor-description decoding: 'use_encoding' means reuse the main
    # connection encoding; otherwise honor the declared value if any.
    if self.description_encoding == 'use_encoding':
        self._description_decoder = processors.to_unicode_processor_factory(
            encoding)
    elif self.description_encoding is not None:
        self._description_decoder = processors.to_unicode_processor_factory(
            self.description_encoding)
    self._encoder = codecs.getencoder(self.encoding)
    self._decoder = processors.to_unicode_processor_factory(self.encoding)
def set_isolation_level(self, connection, level):
    """Map *level* (underscored form accepted) to the driver constant and
    apply it on the connection.

    :raises exc.ArgumentError: if *level* is not a supported isolation
        level for this dialect.
    """
    key = level.replace('_', ' ')
    if key not in self._isolation_lookup:
        raise exc.ArgumentError(
            "Invalid value '%s' for isolation_level. "
            "Valid isolation levels for %s are %s" %
            (level, self.name, ", ".join(self._isolation_lookup)))
    connection.set_isolation(self._isolation_lookup[key])
def _url(**db_parameters):
    """
    Composes a SQLAlchemy connect string from the given database connection
    parameters.

    :raises exc.ArgumentError: if ``account`` is missing, or ``schema`` is
        given without ``database``.
    """
    specified_parameters = []
    if 'account' not in db_parameters:
        raise exc.ArgumentError("account parameter must be specified.")

    if 'host' in db_parameters:
        ret = 'snowflake://{user}:{password}@{host}:{port}/'.format(
            user=db_parameters['user'],
            password=quote_plus(db_parameters.get('password', '')),
            host=db_parameters['host'],
            port=db_parameters['port'] if 'port' in db_parameters else 443,
        )
        specified_parameters += ['user', 'password', 'host', 'port']
    else:
        # BUG FIX: the password was previously interpolated unescaped in
        # this branch (unlike the host branch above); special characters
        # such as '@' or '/' corrupted the resulting URL.
        ret = 'snowflake://{user}:{password}@{account}/'.format(
            account=db_parameters['account'],
            user=db_parameters['user'],
            password=quote_plus(db_parameters.get('password', '')),
        )
        specified_parameters += ['user', 'password', 'account']

    if 'database' in db_parameters:
        ret += quote_plus(db_parameters['database'])
        specified_parameters += ['database']
        if 'schema' in db_parameters:
            ret += '/' + quote_plus(db_parameters['schema'])
            specified_parameters += ['schema']
    elif 'schema' in db_parameters:
        raise exc.ArgumentError("schema cannot be specified without database")

    def sep(is_first_parameter):
        # '?' introduces the query string; '&' separates later params.
        return '?' if is_first_parameter else '&'

    is_first_parameter = True
    for p in sorted(db_parameters.keys()):
        v = db_parameters[p]
        if p not in specified_parameters:
            # Strings are URL-quoted; everything else is stringified as-is.
            encoded_value = quote_plus(v) if IS_STR(v) else str(v)
            ret += sep(is_first_parameter) + p + '=' + encoded_value
            is_first_parameter = False
    return ret
async def set_isolation_level(self, connection, level):
    """Apply *level* via SET SESSION CHARACTERISTICS, then COMMIT so the
    session-level setting takes effect.

    :raises exc.ArgumentError: for unsupported levels.
    """
    level = level.replace('_', ' ')
    if level not in self._isolation_lookup:
        raise exc.ArgumentError(
            "Invalid value '%s' for isolation_level. "
            "Valid isolation levels for %s are %s" %
            (level, self.name, ", ".join(self._isolation_lookup)))
    stmt = ("SET SESSION CHARACTERISTICS AS TRANSACTION "
            "ISOLATION LEVEL %s" % level)
    await connection.execute(stmt)
    await connection.execute("COMMIT")
async def _set_isolation_level(self, connection, level):
    """Validate *level*, then apply it on a fresh cursor and COMMIT so the
    session-level setting sticks.

    :raises exc.ArgumentError: for unsupported levels.
    """
    if level not in self._isolation_lookup:
        raise exc.ArgumentError(
            "Invalid value '%s' for isolation_level. "
            "Valid isolation levels for %s are %s" %
            (level, self.name, ", ".join(self._isolation_lookup)))
    cur = await connection.cursor()
    await cur.execute("SET SESSION TRANSACTION ISOLATION LEVEL %s" % level)
    await cur.execute("COMMIT")
    await cur.close()
def set_isolation_level(self, connection, level):
    """Record the requested level and apply it through the ODBC
    transaction-isolation connection attribute.

    :raises exc.ArgumentError: for unsupported levels.
    """
    self.isolation_level = level
    normalized = level.replace("_", " ")
    if normalized not in self._isolation_lookup:
        raise exc.ArgumentError(
            "Invalid value '%s' for isolation_level. "
            "Valid isolation levels for %s are %s" %
            (normalized, self.name,
             ", ".join(self._isolation_lookup.keys())))
    connection.set_attr(self.dbapi.SQL_ATTR_TXN_ISOLATION,
                        self._isolation_lookup[normalized])
def set_isolation_level(self, connection, level):
    """Map *level* onto SQLite's read_uncommitted pragma value and apply
    it on a short-lived cursor.

    :raises exc.ArgumentError: for unsupported levels.
    """
    key = level.replace('_', ' ')
    if key not in self._isolation_lookup:
        raise exc.ArgumentError(
            "Invalid value '%s' for isolation_level. "
            "Valid isolation levels for %s are %s" %
            (level, self.name, ", ".join(self._isolation_lookup)))
    cursor = connection.cursor()
    cursor.execute("PRAGMA read_uncommitted = %d" %
                   self._isolation_lookup[key])
    cursor.close()
def create_connect_args(self, url):
    """Resolve a json:/// URL into an absolute filename plus a copy of the
    URL's query options.

    :raises exc.ArgumentError: if the URL carries user/host/port parts.
    """
    if url.username or url.password or url.host or url.port:
        raise exc.ArgumentError(
            "Invalid JsonDB URL: %s\n"
            "Valid JsonDB URL forms are:\n"
            " json:///relative/path/to/file.json\n"
            " json:////absolute/path/to/file.json" % (url,))
    return [os.path.abspath(url.database)], url.query.copy()
def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
    """MySQL float-type mixin constructor.

    For REAL/DOUBLE, precision and scale must be supplied together or
    omitted together.

    :raises exc.ArgumentError: if only one of precision/scale is given.
    """
    # Exactly one of the two being None means a half-specified pair.
    half_specified = (precision is None) != (scale is None)
    if isinstance(self, (REAL, DOUBLE)) and half_specified:
        raise exc.ArgumentError(
            "You must specify both precision and scale or omit "
            "both altogether.")
    super(_FloatType, self).__init__(precision=precision,
                                     asdecimal=asdecimal, **kw)
    self.scale = scale
def create_connect_args(self, url):
    """Translate a ctsql URL into connect() options; query parameters are
    rejected outright.

    :raises exc.ArgumentError: if the URL carries query parameters.
    """
    if url.query:
        raise exc.ArgumentError(
            "Invalid ctsql URL: %s\nQuery parameters not supported"
            % (url, ))
    opts = url.translate_connect_args()
    # The driver expects 'user', not 'username'.
    username = opts.pop("username", None)
    if username:
        opts['user'] = username
    return ([], opts)
def set_isolation_level(self, connection, level):
    """Validate *level* against this dialect's lookup table.

    NOTE(review): the SET TRANSACTION statements are commented out in the
    original; as written this only validates the level and opens/closes a
    cursor without executing anything — confirm whether that is intended.

    :raises exc.ArgumentError: for unsupported levels.
    """
    log.debug("-->")
    level = level.replace("_", " ")
    if level not in self._isolation_lookup:
        raise exc.ArgumentError(
            "Invalid value '%s' for isolation_level. "
            "Valid isolation levels for %s are %s" %
            (level, self.name, ", ".join(self._isolation_lookup)))
    cursor = connection.cursor()
    # Intentionally not executed (preserved from original):
    # cursor.execute(sql.text("SET TRANSACTION ISOLATION LEVEL %s" % level))
    # cursor.execute("COMMIT")
    cursor.close()
def _find_entity_basestring(self, query, token, raiseerr):
    """Locate the mapper entity a string property name should resolve
    against; returns None (or raises) when the query has no mapper
    entities.
    """
    # Only the first _MapperEntity is consulted when searching by string
    # prop name; attribute-based lookup is more exact.
    for ent in query._mapper_entities:
        return ent
    if raiseerr:
        raise sa_exc.ArgumentError(
            "Query has only expression-based entities - "
            "can't find property named '%s'." % (token, ))
    return None
def set_parent(self, parent, init):
    """Attach this synonym to its parent mapper, optionally mapping the
    shadowed column under the synonym's name when ``map_column=True``.

    :raises sa_exc.ArgumentError: if no column of that name exists on the
        mapped table, or a ColumnProperty is already keyed to the name.
    """
    if self.map_column:
        # implement the 'map_column' option.
        if self.key not in parent.mapped_table.c:
            raise sa_exc.ArgumentError(
                "Can't compile synonym '%s': no column on table "
                "'%s' named '%s'" %
                (self.name, parent.mapped_table.description, self.key))
        elif parent.mapped_table.c[self.key] in \
                parent._columntoproperty and \
                parent._columntoproperty[
                    parent.mapped_table.c[self.key]
                ].key == self.name:
            # The target name is already occupied by a ColumnProperty for
            # this very column — mapping it again would clash.
            raise sa_exc.ArgumentError(
                "Can't call map_column=True for synonym %r=%r, "
                "a ColumnProperty already exists keyed to the name "
                "%r for column %r" %
                (self.key, self.name, self.name, self.key))
        # Map the underlying column under the synonym's name, and record
        # which key it was mapped by for later introspection.
        p = properties.ColumnProperty(parent.mapped_table.c[self.key])
        parent._configure_property(self.name, p, init=init, setparent=True)
        p._mapped_by_synonym = self.key
    self.parent = parent
def set_isolation_level(self, connection, level):
    """Set the transaction isolation level, treating AUTOCOMMIT specially
    via the driver's autocommit switch.

    :raises exc.ArgumentError: for unsupported non-AUTOCOMMIT levels.
    """
    if level == "AUTOCOMMIT":
        connection.setautocommit(True)
        return
    # autocommit is False by default, so no explicit reset is needed here.
    if level not in self._isolation_lookup:
        raise exc.ArgumentError(
            "Invalid value '%s' for isolation_level. "
            "Valid isolation levels for %s are %s" %
            (level, self.name, ", ".join(self._isolation_lookup)))
    with connection.cursor() as cursor:
        cursor.execute("SET TRANSACTION ISOLATION LEVEL %s" % level)
def __init__(self, class_, key, callable_, dispatch, class_manager,
             copy_function=None, compare_function=None, **kwargs):
    """Mutable scalar attribute implementation; a copy function is
    mandatory so value snapshots can be taken.

    :raises sa_exc.ArgumentError: if *copy_function* is None.
    """
    # BUG FIX: validate the required argument *before* running the
    # superclass constructor and mutating class_manager, so a bad call
    # leaves no partial side effects behind.
    if copy_function is None:
        raise sa_exc.ArgumentError(
            "MutableScalarAttributeImpl requires a copy function")
    super(ScalarAttributeImpl, self).__init__(
        class_, key, callable_, dispatch,
        compare_function=compare_function, **kwargs)
    class_manager.mutable_attributes.add(key)
    self.copy = copy_function
def create_connect_args(self, url):
    """Map an rqlite URL onto pyrqlite connect() keyword arguments.

    :raises exc.ArgumentError: if the URL carries credentials (not
        supported by RQLite).
    """
    if url.username or url.password:
        raise exc.ArgumentError(
            "Invalid RQLite URL: %s\n"
            "Valid RQLite URL forms are:\n"
            " rqlite+pyrqlite://host:port/[?params]" % (url,))
    opts = url.query.copy()
    # Coerce string-typed query options into their proper types.
    for kw, typ in (('connect_timeout', float),
                    ('detect_types', int),
                    ('max_redirects', int)):
        util.coerce_kw_type(opts, kw, typ)
    opts['host'] = url.host
    opts['port'] = url.port
    return ([], opts)
def test_db_error_noncompliant_dbapi(self):
    """DBAPIError.instance must fall back to plain DBAPIError when given
    an exception outside the declared DBAPI hierarchy — both for an
    arbitrary non-compliant exception and for SQLAlchemy's own errors.
    """
    try:
        raise sa_exceptions.DBAPIError.instance('', [], OutOfSpec(),
                                                DatabaseError)
    except sa_exceptions.DBAPIError as e:
        # Exactly DBAPIError, not a subclass.
        self.assert_(e.__class__ is sa_exceptions.DBAPIError)
    except OutOfSpec:
        # The original exception must not leak through unwrapped.
        self.assert_(False)
    try:
        raise sa_exceptions.DBAPIError.instance(
            '', [], sa_exceptions.ArgumentError(), DatabaseError)
    except sa_exceptions.DBAPIError as e:
        self.assert_(e.__class__ is sa_exceptions.DBAPIError)
    except sa_exceptions.ArgumentError:
        self.assert_(False)
def _accept_with(cls, target):
    """Normalize the listen target for session events.

    Returns the Session class/instance events should attach to, or None
    when the target is not session-related.

    :raises exc.ArgumentError: if a ScopedSession's factory is not a
        Session subclass.
    """
    if isinstance(target, orm.ScopedSession):
        factory = target.session_factory
        if not (isinstance(factory, type) and
                issubclass(factory, orm.Session)):
            raise exc.ArgumentError(
                "Session event listen on a ScopedSession "
                "requires that its creation callable "
                "is a Session subclass.")
        return factory
    if isinstance(target, type):
        if issubclass(target, orm.ScopedSession):
            return orm.Session
        if issubclass(target, orm.Session):
            return target
        return None
    if isinstance(target, orm.Session):
        return target
    return None
def create_connect_args(self, url):
    """Build sqlite3.connect() arguments from a SQLite URL; only the
    database path and query-string options are meaningful.

    :raises exc.ArgumentError: if the URL carries user/host/port parts.
    """
    if url.username or url.password or url.host or url.port:
        raise exc.ArgumentError("Invalid SQLite URL: %s\n"
                                "Valid SQLite URL forms are:\n"
                                " sqlite:///:memory: (or, sqlite://)\n"
                                " sqlite:///relative/path/to/file.db\n"
                                " sqlite:////absolute/path/to/file.db"
                                % (url, ))
    filename = url.database or ':memory:'
    opts = url.query.copy()
    # Coerce string-typed query options into the types sqlite3 expects.
    for kw, typ in (('timeout', float),
                    ('isolation_level', str),
                    ('detect_types', int),
                    ('check_same_thread', bool),
                    ('cached_statements', int)):
        util.coerce_kw_type(opts, kw, typ)
    return ([filename], opts)