def visit_delete(self, delete_stmt, asfrom=False, **kw): if not self.dialect.supports_delete: raise exc.CompileError( 'ALTER DELETE is not supported by this server version') extra_froms = delete_stmt._extra_froms correlate_froms = {delete_stmt.table}.union(extra_froms) self.stack.append({ "correlate_froms": correlate_froms, "asfrom_froms": correlate_froms, "selectable": delete_stmt, }) text = "ALTER TABLE " table_text = self.delete_table_clause(delete_stmt, delete_stmt.table, extra_froms) text += table_text + " DELETE" if delete_stmt._whereclause is not None: # Do not include table name. # ClickHouse doesn't expect tablename in where. t = delete_stmt._whereclause._compiler_dispatch( self, include_table=False, **kw) if t: text += " WHERE " + t else: raise exc.CompileError('WHERE clause is required') self.stack.pop(-1) return text
def visit_update(self, update_stmt, asfrom=False, **kw):
    """Render ClickHouse ``ALTER TABLE ... UPDATE`` for an UPDATE statement.

    Uses the pre-1.4 crud helper to collect the SET parameters; a WHERE
    clause is mandatory since ClickHouse mutations must be restricted.
    """
    if not self.dialect.supports_update:
        raise exc.CompileError(
            'ALTER UPDATE is not supported by this server version'
        )

    correlate_froms = {update_stmt.table}
    self.stack.append({
        "correlate_froms": correlate_froms,
        "asfrom_froms": correlate_froms,
        "selectable": update_stmt,
    })

    table_text = self.update_tables_clause(
        update_stmt, update_stmt.table, [], **kw
    )
    crud_params = crud._setup_crud_params(
        self, update_stmt, crud.ISUPDATE, **kw
    )

    text = "ALTER TABLE " + table_text + " UPDATE "
    # Column names are rendered without the table prefix on the left of
    # each assignment.
    text += ", ".join(
        c[0]._compiler_dispatch(self, include_table=False) + "=" + c[1]
        for c in crud_params
    )

    if update_stmt._whereclause is None:
        raise exc.CompileError('WHERE clause is required')

    # ClickHouse doesn't expect the table name in WHERE either.
    where_text = self.process(
        update_stmt._whereclause, include_table=False, **kw)
    if where_text:
        text += " WHERE " + where_text

    self.stack.pop(-1)
    return text
def get_column_specification(self, column, **kwargs):
    """Build the DDL fragment for a single column (Virtuoso dialect).

    Renders name, type, a NULL/NOT NULL qualifier, and either an
    IDENTITY clause (for Sequence defaults or the implicit
    autoincrement column) or a plain DEFAULT expression.
    """
    colspec = "%s %s" % (
        self.preparer.format_column(column),
        self.dialect.type_compiler.process(column.type),
    )

    if column.nullable is not None:
        # Sequence-defaulted and primary-key columns are forced NOT NULL.
        forced_not_null = (
            not column.nullable
            or column.primary_key
            or isinstance(column.default, schema.Sequence)
        )
        colspec += " NOT NULL" if forced_not_null else " NULL"

    if column.table is None:
        raise exc.CompileError("virtuoso requires Table-bound columns "
                               "in order to generate DDL")

    # install an IDENTITY Sequence if we have either a sequence
    # or an implicit IDENTITY column
    if isinstance(column.default, schema.Sequence):
        # An explicit start of 0 is honoured; a missing start becomes 1.
        start = column.default.start
        if start != 0:
            start = start or 1
        colspec += " IDENTITY (START WITH %s)" % (start, )
    elif column is column.table._autoincrement_column:
        colspec += " IDENTITY"
    else:
        default = self.get_column_default_string(column)
        if default is not None:
            colspec += " DEFAULT " + default
    return colspec
def post_create_table(self, table):
    """Emit Athena's table suffix DDL: storage format, S3 LOCATION and
    optional Parquet compression TBLPROPERTIES.

    Raises CompileError when no S3 location can be determined from the
    connection.
    """
    raw_connection = table.bind.raw_connection()
    # TODO Supports orc, avro, json, csv or tsv format
    text = "STORED AS PARQUET\n"

    # Prefer an explicit `s3_dir`; fall back to the staging directory.
    kwargs = raw_connection._kwargs
    location = (kwargs["s3_dir"] if "s3_dir" in kwargs
                else raw_connection.s3_staging_dir)
    if not location:
        raise exc.CompileError(
            "`s3_dir` or `s3_staging_dir` parameter is required"
            " in the connection string."
        )

    schema = table.schema or raw_connection.schema_name
    text += "LOCATION '{0}{1}/{2}/'\n".format(location, schema, table.name)

    compression = kwargs.get("compression")
    if compression:
        text += "TBLPROPERTIES ('parquet.compress'='{0}')\n".format(
            compression.upper()
        )
    return text
def get_column_specification(self, column, **kw):
    """Build the DDL fragment for one column (MS Access dialect).

    The table's autoincrement column renders as COUNTER; every other
    column gets its compiled type, an optional NULL/NOT NULL qualifier
    and a DEFAULT expression.

    Raises CompileError for columns not bound to a Table.
    """
    if column.table is None:
        raise exc.CompileError(
            "access requires Table-bound columns "
            "in order to generate DDL"
        )

    colspec = self.preparer.format_column(column)
    if column.table._autoincrement_column is column:
        colspec += " COUNTER"
    else:
        colspec += " " + self.dialect.type_compiler.process(column.type)

        # Primary-key columns are implicitly NOT NULL, so no qualifier
        # is emitted for them.  (The original also re-tested
        # `column.primary_key` inside this branch, which was dead code:
        # the guard above already excludes primary keys.)
        if column.nullable is not None and not column.primary_key:
            colspec += " NULL" if column.nullable else " NOT NULL"

        default = self.get_column_default_string(column)
        if default is not None:
            colspec += " DEFAULT " + default
    return colspec
def post_create_table(self, table):
    """Append the mandatory ClickHouse ``ENGINE = ...`` clause.

    Raises CompileError when the table has no engine configured.
    """
    engine = getattr(table, 'engine', None)
    if engine:
        return ' ENGINE = ' + self.process(engine)
    raise exc.CompileError("No engine for table '%s'" % table.name)
def _prepared_columns(self, table, create_columns, raw_connection):
    """Split CREATE TABLE column definitions into regular columns,
    partition columns and bucketed (clustered) columns.

    Partition/bucket membership comes either from the per-column
    ``awsathena`` dialect options or from hints carried on the raw
    connection, matched by bare column name or ``table.column``.

    Returns a 3-tuple ``(columns, partitions, buckets)`` of lists of
    tab-indented DDL fragments.  Compile errors are re-raised with
    table/column context added.
    """
    columns, partitions, buckets = [], [], []
    raw_conn_partitions = self._get_raw_connection_partitions(
        raw_connection)
    raw_conn_buckets = self._get_raw_connection_buckets(raw_connection)
    for create_column in create_columns:
        column = create_column.element
        column_dialect_opts = column.dialect_options["awsathena"]
        try:
            processed = self.process(create_column)
            if processed is not None:
                # A partition column goes into the partitions list
                # instead of the main column list.
                if (column_dialect_opts["partition"]
                        or column.name in raw_conn_partitions
                        or f"{table.name}.{column.name}" in raw_conn_partitions):
                    partitions.append(f"\t{processed}")
                else:
                    columns.append(f"\t{processed}")
                # Bucketing is independent of partitioning; only the
                # formatted column name is collected here.
                if (column_dialect_opts["cluster"]
                        or column.name in raw_conn_buckets
                        or f"{table.name}.{column.name}" in raw_conn_buckets):
                    buckets.append(
                        f"\t{self.preparer.format_column(column)}")
        except exc.CompileError as ce:
            # Re-raise with table/column context for better diagnostics.
            util.raise_(
                exc.CompileError(
                    util.u(
                        f"(in table '{table.description}', column '{column.name}'): "
                        f"{ce.args[0]}")),
                from_=ce,
            )
    return columns, partitions, buckets
def visit_join(self, join, asfrom=False, **kwargs):
    """Compile a JOIN using ClickHouse's USING-based join syntax.

    ClickHouse modifiers (GLOBAL/ANY/ALL) are read from optional
    attributes on the join object.  The ON clause must be a single
    column expression; only its right-hand side name is emitted after
    USING (the compiled ``a = b`` text is split on `` = `` and the
    last piece kept).
    """
    join_type = " "
    if getattr(join, 'global_', False):
        join_type += "GLOBAL "
    if getattr(join, 'any', False):
        join_type += "ANY "
    if getattr(join, 'all', False):
        join_type += "ALL "
    if join.isouter:
        join_type += "LEFT OUTER JOIN "
    else:
        # Non-outer joins default to ClickHouse's ANY LEFT JOIN.
        join_type += "ANY LEFT JOIN "
    if not isinstance(join.onclause, elements.ColumnElement):
        raise exc.CompileError("Only tuple elements are supported. "
                               "Got: %s" % type(join.onclause))
    # Keep only the right-hand column name from the compiled onclause.
    return (join.left._compiler_dispatch(self, asfrom=True, **kwargs) +
            join_type +
            join.right._compiler_dispatch(self, asfrom=True, **kwargs) +
            " USING " +
            join.onclause._compiler_dispatch(
                self, **kwargs).split(" = ").pop())
def visit_column(self, column, result_map=None, **kwargs):
    """Compile a column reference for MonetDB.

    MonetDB does not currently support column references that include
    a schema name.  This could cause problems when selecting from two
    identically named tables; this would have to be manually remedied
    with table aliases.
    """
    name = orig_name = column.name
    if name is None:
        # Fix: error message previously read "it's 'name'"; now matches
        # the grammar used by the other visit_column implementations.
        raise exc.CompileError("Cannot compile Column object until "
                               "its 'name' is assigned.")

    is_literal = column.is_literal
    # Truncated (anonymous) labels get a deterministic shortened name.
    if not is_literal and isinstance(name, sql._truncated_label):
        name = self._truncated_identifier("colident", name)

    if result_map is not None:
        result_map[name.lower()] = (orig_name,
                                    (column, name, column.key),
                                    column.type)

    if is_literal:
        name = self.escape_literal_column(name)
    else:
        name = self.preparer.quote(name, column.quote)

    table = column.table
    if table is None or not table.named_with_column:
        return name
    # Qualify with the (possibly aliased) table name only — never the
    # schema (see docstring).
    tablename = table.name
    if isinstance(tablename, sql._truncated_label):
        tablename = self._truncated_identifier("alias", tablename)
    return self.preparer.quote(tablename, table.quote) + "." + name
def visit_extract(self, extract, **kw):
    """Render ``EXTRACT(field FROM expr)`` as a SQLite-style STRFTIME
    cast.

    Raises CompileError for fields not present in ``extract_map``.
    Fix: the try block previously also wrapped ``self.process()``, so a
    KeyError raised while compiling the inner expression was mislabeled
    as an invalid extract field; the lookup alone is guarded now.
    """
    try:
        fmt = self.extract_map[extract.field]
    except KeyError:
        raise exc.CompileError("%s is not a valid extract argument." %
                               extract.field)
    return "CAST(STRFTIME('%s', %s) AS INTEGER)" % (
        fmt, self.process(extract.expr, **kw))
def visit_VARCHAR(self, type_, **kw):
    """Compile VARCHAR; Firebird requires an explicit length."""
    if type_.length:
        rendered = super(FBTypeCompiler, self).visit_VARCHAR(type_, **kw)
        return self._extend_string(type_, rendered)
    raise exc.CompileError(
        "VARCHAR requires a length on dialect %s" % self.dialect.name
    )
def post_create_table(self, table):
    """Render Redshift-specific attributes that follow CREATE TABLE:
    DISTSTYLE, DISTKEY and (interleaved) SORTKEY.

    Raises CompileError for an invalid diststyle and ArgumentError when
    both ``sortkey`` and ``interleaved_sortkey`` are supplied.
    """
    text = ""
    info = table.dialect_options['redshift']

    diststyle = info.get('diststyle')
    if diststyle:
        diststyle = diststyle.upper()
        if diststyle not in ('EVEN', 'KEY', 'ALL'):
            raise exc.CompileError(
                u"diststyle {0} is invalid".format(diststyle))
        text += " DISTSTYLE " + diststyle

    distkey = info.get('distkey')
    if distkey:
        text += " DISTKEY ({0})".format(distkey)

    sortkey = info.get('sortkey')
    interleaved_sortkey = info.get('interleaved_sortkey')
    if sortkey and interleaved_sortkey:
        raise exc.ArgumentError(
            "Parameters sortkey and interleaved_sortkey are "
            "mutually exclusive; you may not specify both.")
    if sortkey or interleaved_sortkey:
        keys = sortkey or interleaved_sortkey
        # Bug fix: a bare string was previously only normalized when it
        # came from `sortkey`; a string `interleaved_sortkey` fell
        # through and was iterated character by character.  Normalize
        # whichever one was supplied.
        if isinstance(keys, string_types):
            keys = [keys]
        keys = [
            key.name if isinstance(key, Column) else key
            for key in keys
        ]
        if interleaved_sortkey:
            text += " INTERLEAVED"
        text += " SORTKEY ({0})".format(", ".join(keys))
    return text
def visit_column(self, column, add_to_result_map=None, include_table=True,
                 **kwargs):
    """Compile a column reference into a resolver object rather than a
    SQL string: a LiteralResolver for literal columns, otherwise a
    ColumnResolver carrying the (possibly aliased) table name.

    Raises CompileError for columns whose name is not yet assigned.
    """
    name = orig_name = column.name
    if name is None:
        raise exc.CompileError("Cannot compile Column object until "
                               "its 'name' is assigned.")

    is_literal = column.is_literal
    # Truncated (anonymous) labels get a deterministic shortened name.
    if not is_literal and isinstance(name, sql._truncated_label):
        name = self._truncated_identifier("colident", name)

    if add_to_result_map is not None:
        add_to_result_map(
            name,
            orig_name,
            (column, name, column.key),
            column.type
        )

    tablename = None
    table = column.table
    if table is not None and include_table and table.named_with_column:
        tablename = table.name
        if isinstance(tablename, sql._truncated_label):
            tablename = self._truncated_identifier("alias", tablename)

    if is_literal:
        # Literal columns are expected to be table-less.
        assert tablename is None
        assert table is None
        return resolver.LiteralResolver(name)
    else:
        return resolver.ColumnResolver(name, tablename)
def order_by_clause(self, select, **kw): order_by = self.process(select._order_by_clause, **kw) # ORDER BY clauses in DISTINCT queries must reference aliased # inner columns by alias name, not true column name. if order_by and getattr(select, '_distinct', False): labels = self._find_labeled_columns(select.inner_columns, select.use_labels) if labels: for needs_alias in labels.keys(): r = re.compile(r'(^| )(%s)(,| |$)' % re.escape(needs_alias)) order_by = r.sub((r'\1%s\3' % labels[needs_alias]), order_by) # No ORDER BY in subqueries. if order_by: if self.is_subquery(): # It's safe to simply drop the ORDER BY if there is no # LIMIT. Right? Other dialects seem to get away with # dropping order. if select._limit: raise exc.CompileError( "MaxDB does not support ORDER BY in subqueries") else: return "" return " ORDER BY " + order_by else: return ""
def visit_join(self, join, asfrom=False, **kwargs):
    """Compile a ClickHouse JOIN with GLOBAL/ANY/ALL modifiers and a
    mandatory tuple-valued USING clause."""
    modifiers = " "
    if join.global_:
        modifiers += "GLOBAL "
    if join.any:
        modifiers += "ANY "
    if join.all:
        modifiers += "ALL "
    modifiers += "LEFT OUTER JOIN " if join.isouter else "INNER JOIN "

    if not isinstance(join.onclause, elements.Tuple):
        raise exc.CompileError("Only tuple elements are supported. "
                               "Got: %s" % type(join.onclause))

    left_text = join.left._compiler_dispatch(self, asfrom=True, **kwargs)
    right_text = join.right._compiler_dispatch(self, asfrom=True, **kwargs)
    using_text = join.onclause._compiler_dispatch(self, **kwargs)
    return left_text + modifiers + right_text + " USING " + using_text
def visit_create_table(self, create):
    """Compile ``CREATE EXTERNAL TABLE``: the formatted table name, the
    column definitions, any table constraints, and the dialect's
    post-create clauses."""
    table = create.element
    preparer = self.preparer

    # Collect per-column DDL fragments, annotating compile errors with
    # table/column context.
    items = []
    for create_column in create.columns:
        column = create_column.element
        try:
            processed = self.process(create_column)
        except exc.CompileError as ce:
            util.raise_from_cause(
                exc.CompileError(
                    util.u("(in table '{0}', column '{1}'): {2}").format(
                        table.description, column.name, ce.args[0])))
        else:
            if processed is not None:
                items.append("\t" + processed)

    const = self.create_table_constraints(
        table,
        _include_foreign_key_constraints=create.
        include_foreign_key_constraints,
    )
    if const:
        items.append("\t" + const)

    text = '\nCREATE EXTERNAL TABLE ' + preparer.format_table(table) + ' ('
    if items:
        text += "\n" + ", \n".join(items)
    text += "\n)\n%s\n\n" % self.post_create_table(table)
    return text
def get_column_specification(self, column, **kwargs):
    """Build one column's DDL for Sybase.

    The table's implicit autoincrement column renders an IDENTITY
    clause (with start/increment when a Sequence default supplies
    them); every other column gets NULL/NOT NULL plus an optional
    DEFAULT.  Raises CompileError for unbound columns.
    """
    colspec = "%s %s" % (self.preparer.format_column(column),
                         self.dialect.type_compiler.process(column.type))

    if column.table is None:
        raise exc.CompileError("The Sybase dialect requires Table-bound "
                               "columns in order to generate DDL")

    # install a IDENTITY Sequence if we have an implicit IDENTITY column
    if column.table._autoincrement_column is column:
        sequence = (column.default
                    if isinstance(column.default, sa_schema.Sequence)
                    else False)
        if sequence:
            start = sequence.start or 1
            increment = sequence.increment or 1
        else:
            start, increment = 1, 1
        if (start, increment) == (1, 1):
            colspec += " IDENTITY"
        else:
            # TODO: need correct syntax for this
            colspec += " IDENTITY(%s,%s)" % (start, increment)
    else:
        if column.nullable is not None:
            colspec += (" NOT NULL"
                        if not column.nullable or column.primary_key
                        else " NULL")
        default = self.get_column_default_string(column)
        if default is not None:
            colspec += " DEFAULT " + default
    return colspec
def visit_lambda(self, lambda_, **kw):
    """Compile a Python lambda into ClickHouse's ``args -> expr`` form.

    Positional parameters become the lambda argument names; *args and
    **kwargs are rejected since they have no SQL counterpart.
    """
    func = lambda_.func
    spec = inspect_getargspec(func)
    if spec.varargs:
        raise exc.CompileError('Lambdas with *args are not supported')
    if spec.keywords:
        raise exc.CompileError('Lambdas with **kwargs are not supported')

    # Feed literal columns named after the parameters through the
    # lambda body, then compile the resulting expression.
    arg_columns = [literal_column(arg_name) for arg_name in spec.args]
    body_text = self.process(func(*arg_columns), **kw)
    return ', '.join(spec.args) + ' -> ' + body_text
def visit_computed_column(self, generated):
    """Render a Firebird computed column; any explicit persistence
    setting is rejected."""
    if generated.persisted is None:
        expression = self.sql_compiler.process(
            generated.sqltext, include_table=False, literal_binds=True)
        return "GENERATED ALWAYS AS (%s)" % expression
    raise exc.CompileError(
        "Firebird computed columns do not support a persistence "
        "method setting; set the 'persisted' flag to None for "
        "Firebird support.")
def _get_table_location_specification(self, table, dialect_opts,
                                      raw_connection):
    """Return the table's ``LOCATION`` clause, or raise CompileError
    explaining how to supply a location for the current configuration
    (connection string vs. dialect keyword argument)."""
    location = self._get_table_location(table, dialect_opts,
                                        raw_connection)
    if location:
        return f"LOCATION '{location}'"
    if raw_connection:
        raise exc.CompileError(
            "`location` or `s3_staging_dir` parameter is required "
            "in the connection string")
    raise exc.CompileError(
        "The location of the table should be specified "
        "by the dialect keyword argument `awsathena_location`")
def visit_bindparam(self, bindparam, **kwargs):
    """Register a bind parameter and render its placeholder.

    A unique parameter clashing with a different parameter of the same
    (truncated) name is a compile error; otherwise the parameter is
    recorded under both its truncated name and its original key.
    """
    name = self._truncate_bindparam(bindparam)
    if name in self.binds:
        existing = self.binds[name]
        conflict = (existing is not bindparam
                    and (existing.unique or bindparam.unique))
        if conflict:
            raise exc.CompileError(
                "Bind parameter '%s' conflicts with unique bind "
                "parameter of the same name" % bindparam.key)
    self.binds[bindparam.key] = self.binds[name] = bindparam
    return self.bindparam_string(name)
def visit_computed_column(self, generated):
    """Render a CockroachDB computed column as ``AS (...) STORED``;
    'virtual' persistence is not supported."""
    if generated.persisted is False:
        raise exc.CompileError(
            "CockroachDB computed columns do not support 'virtual' "
            "persistence; set the 'persisted' flag to None or True for "
            "CockroachDB support.")
    expression = self.sql_compiler.process(
        generated.sqltext, include_table=False, literal_binds=True)
    return "AS (%s) STORED" % expression
def visit_column(self, column, include_table=True, **kwargs):
    """Render a column reference, optionally qualified as
    ``table/column``.  Raises CompileError for unnamed columns."""
    if column.name is None:
        raise exc.CompileError("Cannot compile Column object until "
                               "its 'name' is assigned.")
    table = column.table
    qualify = (include_table and table is not None
               and table.named_with_column)
    if qualify:
        return table.name + "/" + column.name
    return column.name
def __call__(self, element, compiler, **kw):
    """Dispatch to the per-dialect compilation handler, falling back to
    the 'default' handler; raise CompileError when neither exists."""
    # TODO: yes, this could also switch off of DBAPI in use.
    fn = self.specs.get(compiler.dialect.name, None)
    if fn:
        return fn(element, compiler, **kw)
    try:
        fn = self.specs['default']
    except KeyError:
        raise exc.CompileError("%s construct has no default "
                               "compilation handler." % type(element))
    return fn(element, compiler, **kw)
def visit_update(self, update_stmt, **kw):
    """Render ClickHouse ``ALTER TABLE ... UPDATE`` (SQLAlchemy 1.4+
    code path).

    Builds a compile-state for the statement, renders the SET
    assignments via the 1.4 crud helper, and requires a WHERE clause
    since ClickHouse mutations must be restricted.
    """
    if not self.dialect.supports_update:
        raise exc.CompileError(
            'ALTER UPDATE is not supported by this server version')
    # 1.4-style compilation: derive the compile state and work with its
    # processed statement from here on.
    compile_state = update_stmt._compile_state_factory(
        update_stmt, self, **kw)
    update_stmt = compile_state.statement
    render_extra_froms = []
    correlate_froms = {update_stmt.table}
    self.stack.append({
        "correlate_froms": correlate_froms,
        "asfrom_froms": correlate_froms,
        "selectable": update_stmt,
    })
    text = "ALTER TABLE "
    table_text = self.update_tables_clause(update_stmt, update_stmt.table,
                                           render_extra_froms, **kw)
    crud_params = crud._get_crud_params(self, update_stmt, compile_state,
                                        **kw)
    text += table_text
    text += " UPDATE "
    # SET list: crud yields (column, expr, value) triples whose expr is
    # already rendered.
    text += ", ".join(expr + "=" + value
                      for c, expr, value in crud_params)
    if update_stmt._where_criteria:
        # Do not include table name.
        # ClickHouse doesn't expect tablename in where.
        t = self._generate_delimited_and_list(
            update_stmt._where_criteria, include_table=False, **kw)
        if t:
            text += " WHERE " + t
    else:
        raise exc.CompileError('WHERE clause is required')
    self.stack.pop(-1)
    return text
def visit_replace(self, replace_stmt):
    """Compile a MySQL-style ``REPLACE INTO`` statement.

    Adapted from visit_insert: reuses the INSERT column/parameter
    machinery, honours statement prefixes, per-dialect hints and
    RETURNING, and only swaps the leading keyword for REPLACE.
    """
    # Reuse the INSERT machinery for column/parameter processing.
    self.isinsert = True
    colparams = self._get_colparams(replace_stmt)

    if not colparams and \
            not self.dialect.supports_default_values and \
            not self.dialect.supports_empty_insert:
        raise exc.CompileError("The version of %s you are using does "
                               "not support empty inserts." %
                               self.dialect.name)

    preparer = self.preparer
    supports_default_values = self.dialect.supports_default_values

    text = "REPLACE"

    prefixes = [self.process(x) for x in replace_stmt._prefixes]
    if prefixes:
        text += " " + " ".join(prefixes)

    text += " INTO " + preparer.format_table(replace_stmt.table)

    if replace_stmt._hints:
        # Keep only hints targeting this dialect (or all dialects).
        dialect_hints = dict([
            (table, hint_text)
            for (table, dialect), hint_text in
            replace_stmt._hints.items()
            if dialect in ('*', self.dialect.name)
        ])
        if replace_stmt.table in dialect_hints:
            text += " " + self.get_crud_hint_text(
                replace_stmt.table,
                dialect_hints[replace_stmt.table])

    if colparams or not supports_default_values:
        text += " (%s)" % ', '.join(
            [preparer.format_column(c[0]) for c in colparams])

    if self.returning or replace_stmt._returning:
        self.returning = self.returning or replace_stmt._returning
        returning_clause = self.returning_clause(replace_stmt,
                                                 self.returning)
        # Some dialects place RETURNING before the VALUES clause.
        if self.returning_precedes_values:
            text += " " + returning_clause

    text += " VALUES (%s)" % \
        ', '.join([c[1] for c in colparams])

    if self.returning and not self.returning_precedes_values:
        text += " " + returning_clause

    return text
def limit_clause(self, select, **kw):
    """Render ``LIMIT [offset,] limit``; OFFSET alone is unsupported
    and raises CompileError."""
    if select._limit_clause is None:
        if select._offset_clause is not None:
            raise exc.CompileError('OFFSET without LIMIT is not supported')
        return ''
    clause = ' \n LIMIT '
    if select._offset_clause is not None:
        clause += self.process(select._offset_clause, **kw) + ', '
    return clause + self.process(select._limit_clause, **kw)
def visit_insert(self, insert_stmt, asfrom=False, **kw): assert not self.stack # INSERT only at top level self.stack.append({ "correlate_froms": set(), "asfrom_froms": set(), "selectable": insert_stmt, }) preparer = self.preparer text = "INSERT INTO " table_text = preparer.format_table(insert_stmt.table) text += table_text if insert_stmt.select is not None: # `_setup_crud_params()` multiplies parameter placeholders for # multiparam inserts. We don't want this, so this part is moved to # the branch for `INSERT INTO ... SELECT`. crud_params = crud._setup_crud_params(self, insert_stmt, crud.ISINSERT, **kw) if not crud_params: # pragma: no cover raise exc.CompileError( "The '%s' dialect with current database " "version settings does not support empty " "inserts." % self.dialect.name) assert not insert_stmt._has_multi_parameters text += " (%s)" % ", ".join( [preparer.format_column(c[0]) for c in crud_params]) select_text = self.process(self._insert_from_select, **kw) # TODO Provide visit_cte for Clickhouse variant of CTE if self.ctes: text += " %s%s" % (self._render_cte_clause(), select_text) else: text += " %s" % select_text else: # This is normally done by `crud._setup_crud_params()` self.isinsert = True self._clickhouse_json_each_row = True text += ' FORMAT JSONEachRow' assert insert_stmt._post_values_clause is None self.stack.pop(-1) assert not asfrom return text
def post_create_table(self, table):
    """Render ClickHouse's mandatory ``ENGINE = ...`` clause plus an
    optional ``COMMENT``; raises CompileError when no engine is set."""
    engine = getattr(table, 'engine', None)
    if not engine:
        raise exc.CompileError("No engine for table '%s'" % table.name)
    parts = [' ENGINE = ' + self.process(engine)]
    if table.comment is not None:
        rendered = self.sql_compiler.render_literal_value(
            table.comment, sqltypes.String())
        parts.append(' COMMENT ' + rendered)
    return ''.join(parts)
def limit_clause(self, select, **kw):
    """Render MySQL-style ``LIMIT [offset,] limit`` using literal
    values; OFFSET without LIMIT cannot be expressed and raises
    CompileError."""
    limit, offset = select._limit, select._offset
    if limit is None:
        if offset is not None:
            raise exc.CompileError(
                "Cannot compile LIMIT clause, SELECT couldn't have only"
                " OFFSET clause without LIMIT")
        return ""
    if offset is None:
        return "\n LIMIT " + self.process(sql.literal(limit))
    return "\n LIMIT %s, %s" % (self.process(sql.literal(offset)),
                                self.process(sql.literal(limit)))