async def try_compile_rollback(self, dbver: int, eql: bytes):
    """Fast-path compile for a script whose first statement is a rollback.

    Parses *eql* and, if the first statement is ROLLBACK or ROLLBACK TO
    SAVEPOINT, returns a ``(QueryUnit, remaining_statement_count)`` pair;
    otherwise raises ``TransactionError``.
    """
    parsed = edgeql.parse_block(eql.decode())
    first = parsed[0]
    # Number of statements the caller still has to compile after this one.
    remaining = len(parsed) - 1

    if isinstance(first, qlast.RollbackTransaction):
        rollback_unit = dbstate.QueryUnit(
            dbver=dbver,
            status=b'ROLLBACK',
            sql=(b'ROLLBACK;',),
            tx_rollback=True,
            cacheable=False,
        )
        return rollback_unit, remaining

    if isinstance(first, qlast.RollbackToSavepoint):
        sp_sql = f'ROLLBACK TO {pg_common.quote_ident(first.name)};'.encode()
        savepoint_unit = dbstate.QueryUnit(
            dbver=dbver,
            status=b'ROLLBACK TO SAVEPOINT',
            sql=(sp_sql,),
            tx_savepoint_rollback=True,
            cacheable=False,
        )
        return savepoint_unit, remaining

    raise errors.TransactionError(
        'expected a ROLLBACK or ROLLBACK TO SAVEPOINT command'
    )  # pragma: no cover
def apply_ddl_script_ex(
    ddl_text: str,
    *,
    schema: s_schema.Schema,
    modaliases: Optional[Mapping[Optional[str], str]] = None,
    stdmode: bool = False,
    testmode: bool = False,
) -> Tuple[s_schema.Schema, sd.DeltaRoot]:
    """Parse *ddl_text* and apply each DDL statement to *schema*.

    Returns the updated schema together with the accumulated
    :class:`sd.DeltaRoot` containing one command per statement.
    """
    if modaliases is None:
        modaliases = {}

    delta = sd.DeltaRoot()
    for statement in edgeql.parse_block(ddl_text):
        # Each statement both mutates the schema and yields a delta
        # command that is collected into the root delta.
        schema, command = _delta_from_ddl(
            statement,
            schema=schema,
            modaliases=modaliases,
            stdmode=stdmode,
            testmode=testmode,
        )
        delta.add(command)

    return schema, delta
def apply_ddl_script_ex(
    ddl_text: str,
    *,
    schema: s_schema.Schema,
    modaliases: Optional[Mapping[Optional[str], str]] = None,
    stdmode: bool = False,
    internal_schema_mode: bool = False,
    testmode: bool = False,
) -> Tuple[s_schema.Schema, sd.DeltaRoot]:
    """Parse *ddl_text* and apply each DDL statement to *schema*.

    Every parsed node must be a DDL command; anything else aborts with
    an ``AssertionError``.  Returns the updated schema and the combined
    :class:`sd.DeltaRoot` of all applied commands.
    """
    if modaliases is None:
        modaliases = {}

    delta = sd.DeltaRoot()
    for ddl_stmt in edgeql.parse_block(ddl_text):
        # Guard against non-DDL statements sneaking into a DDL script.
        if not isinstance(ddl_stmt, qlast.DDLCommand):
            raise AssertionError(f'expected DDLCommand node, got {ddl_stmt!r}')
        schema, command = _delta_from_ddl(
            ddl_stmt,
            schema=schema,
            modaliases=modaliases,
            stdmode=stdmode,
            internal_schema_mode=internal_schema_mode,
            testmode=testmode,
        )
        delta.add(command)

    return schema, delta
async def _make_stdlib(testmode: bool):
    """Compile the standard library into a schema and its SQL bootstrap text.

    Concatenates the DDL of every std module (plus ``_testmode`` when
    *testmode* is set), applies it statement by statement, and generates
    the backend SQL for each resulting plan.

    Returns a ``(schema, sql_text)`` pair.
    """
    schema = s_schema.Schema()
    # The block type (SQLBlock vs PLTopBlock) is chosen lazily based on
    # whether the first plan is a DATABASE command or regular DDL.
    current_block = None

    std_texts = []
    for modname in s_schema.STD_LIB + ('stdgraphql', ):
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text('_testmode'))

    ddl_text = '\n'.join(std_texts)

    for ddl_cmd in edgeql.parse_block(ddl_text):
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, schema=schema, modaliases={None: 'std'}, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Do a dry-run on test_schema to canonicalize
        # the schema delta-commands.
        test_schema = schema
        context = sd.CommandContext()
        context.stdmode = True
        delta_command.apply(test_schema, context=context)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)

        if isinstance(plan, (s_db.CreateDatabase, s_db.DropDatabase)):
            # DATABASE commands must live in a plain SQL block;
            # mixing them with PL-generating DDL is rejected.
            if (current_block is not None
                    and not isinstance(current_block, dbops.SQLBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.SQLBlock()
        else:
            # Regular DDL must live in a PL top block.
            if (current_block is not None
                    and not isinstance(current_block, dbops.PLTopBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.PLTopBlock()

        plan.generate(current_block)

    sql_text = current_block.to_string()

    return schema, sql_text
async def _amend_stdlib(
    ddl_text: str,
    stdlib: StdlibBits,
) -> Tuple[StdlibBits, str]:
    """Apply extra DDL on top of an already-built stdlib.

    Applies each statement of *ddl_text* to both the std schema and the
    reflection schema, generates the corresponding backend SQL, and
    compiles schema-storage updates for every produced plan.

    Returns the amended :class:`StdlibBits` and the SQL text to execute.
    """
    schema = stdlib.stdschema
    reflschema = stdlib.reflschema

    topblock = dbops.PLTopBlock()
    plans = []

    context = sd.CommandContext()
    context.stdmode = True

    for ddl_cmd in edgeql.parse_block(ddl_text):
        assert isinstance(ddl_cmd, qlast.DDLCommand)
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        # Keep the reflection schema in lockstep with the std schema.
        reflschema = delta_command.apply(reflschema, context)
        plan.generate(topblock)
        plans.append(plan)

    compiler = edbcompiler.new_compiler(
        std_schema=schema,
        reflection_schema=reflschema,
        schema_class_layout=stdlib.classlayout,
    )

    # BUGFIX: the compiler context was previously created twice in a row,
    # with the second `new_compiler_context(schema)` call immediately
    # overwriting the first and silently discarding bootstrap_mode=True.
    # Schema-storage compilation of stdlib deltas is a bootstrap-time
    # operation, so keep the bootstrap-mode context.
    compilerctx = edbcompiler.new_compiler_context(
        schema,
        bootstrap_mode=True,
    )

    for plan in plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=plan,
            block=topblock,
        )

    sqltext = topblock.to_string()

    return stdlib._replace(stdschema=schema, reflschema=reflschema), sqltext
def load_std_module(
        schema: s_schema.Schema, modname: str) -> s_schema.Schema:
    """Parse the source of std module *modname* and apply it to *schema*."""
    modaliases = {}

    # NOTE(review): this context is constructed but never passed to
    # apply_ddl below — preserved as-is for behavioral fidelity.
    context = s_delta.CommandContext()
    context.stdmode = True

    modtext = get_std_module_text(modname)
    statements = edgeql.parse_block(modtext)
    for statement in statements:
        schema = s_ddl.apply_ddl(
            statement,
            schema=schema,
            modaliases=modaliases,
            stdmode=True,
        )

    return schema
async def _execute_edgeql_ddl(
    schema: s_schema.Schema,
    ddltext: str,
    stdmode: bool = True,
) -> s_schema.Schema:
    """Apply every DDL statement in *ddltext* to *schema*, returning it."""
    ctx = sd.CommandContext(stdmode=stdmode)

    for node in edgeql.parse_block(ddltext):
        # Convert the AST node into a delta command, then fold it
        # into the schema.
        command = s_ddl.delta_from_ddl(
            node, modaliases={}, schema=schema, stdmode=stdmode)
        schema = command.apply(schema, ctx)

    return schema
def run_ddl(cls, schema, ddl, default_module=defines.DEFAULT_MODULE_ALIAS):
    """Parse *ddl* and apply each statement to *schema*.

    Handles three statement shapes: CREATE MIGRATION, APPLY MIGRATION,
    and plain DDL (CREATE/DELETE/ALTER of functions, types, etc.).
    Returns the schema after all statements have been applied.
    """
    statements = edgeql.parse_block(ddl)

    current_schema = schema
    target_schema = None

    for stmt in statements:
        if isinstance(stmt, qlast.CreateDelta):
            # CREATE MIGRATION
            if target_schema is None:
                # The std schema is loaded lazily, only when a
                # migration actually appears in the script.
                target_schema = _load_std_schema()

            ddl_plan = s_ddl.cmd_from_ddl(
                stmt, schema=current_schema,
                modaliases={None: default_module},
                testmode=True)

            ddl_plan = s_ddl.compile_migration(
                ddl_plan, target_schema, current_schema)

        elif isinstance(stmt, qlast.Delta):
            # APPLY MIGRATION
            delta_cmd = s_ddl.cmd_from_ddl(
                stmt, schema=current_schema,
                modaliases={None: default_module},
                testmode=True)

            # Look up the previously created migration object and
            # replay its stored commands.
            delta = current_schema.get(delta_cmd.classname)
            ddl_plan = sd.DeltaRoot(canonical=True)
            ddl_plan.update(delta.get_commands(current_schema))

        elif isinstance(stmt, qlast.DDL):
            # CREATE/DELETE/ALTER (FUNCTION, TYPE, etc)
            ddl_plan = s_ddl.delta_from_ddl(
                stmt, schema=current_schema,
                modaliases={None: default_module},
                testmode=True)

        else:
            raise ValueError(
                f'unexpected {stmt!r} in compiler setup script')

        # Each statement is applied in its own fresh context.
        context = sd.CommandContext()
        context.testmode = True

        current_schema, _ = ddl_plan.apply(current_schema, context)

    return current_schema
async def _execute_edgeql_ddl(
    schema: s_schema.Schema_T,
    ddltext: str,
    stdmode: bool = True,
) -> s_schema.Schema_T:
    """Apply every DDL statement in *ddltext* to *schema*, returning it."""
    ctx = sd.CommandContext(stdmode=stdmode)

    statements = edgeql.parse_block(ddltext)
    for node in statements:
        # Only DDL commands are legal in this script.
        assert isinstance(node, qlast.DDLCommand)
        command = s_ddl.delta_from_ddl(
            node, modaliases={}, schema=schema, stdmode=stdmode)
        schema = command.apply(schema, ctx)  # type: ignore

    return schema
def _assert_migration_consistency(self, schema_text):
    """Assert that COMMIT MIGRATION and the DDL text derived from the
    migration produce identical schemas; fail the test otherwise.
    """
    # Wrap the declarative schema text into a migration statement.
    migration_text = f'''
        CREATE MIGRATION m TO {{
            {schema_text}
        }};
    '''

    migration_ql = edgeql.parse_block(migration_text)

    migration_cmd = s_ddl.cmd_from_ddl(
        migration_ql[0],
        schema=self.schema,
        modaliases={
            None: 'default'
        },
    )

    migration_cmd = s_ddl.compile_migration(
        migration_cmd,
        self.std_schema,
        self.schema,
    )

    context = s_delta.CommandContext()
    schema, migration = migration_cmd.apply(self.schema, context)

    # Replay the migration's stored commands to obtain the
    # "committed" baseline schema.
    ddl_plan = s_delta.DeltaRoot(canonical=True)
    ddl_plan.update(migration.get_commands(schema))
    baseline_schema, _ = ddl_plan.apply(schema, context)

    # Render the migration back into DDL text and run it from scratch.
    ddl_text = s_ddl.ddl_text_from_delta(schema, migration)

    try:
        test_schema = self.run_ddl(schema, ddl_text)
    except errors.EdgeDBError as e:
        self.fail(markup.dumps(e))

    # Both paths must converge on the same schema.
    diff = s_ddl.delta_schemas(baseline_schema, test_schema)

    if list(diff.get_subcommands()):
        self.fail(
            f'unexpected difference in schema produced by\n'
            f'COMMIT MIGRATION and DDL obtained from GET MIGRATION:\n'
            f'{markup.dumps(diff)}\n'
            f'DDL text was:\n{ddl_text}')
def load_std_module(schema: s_schema.Schema, modname: str) -> s_schema.Schema:
    """Parse the source of std module *modname* and apply it to *schema*."""
    modaliases = {}
    # Only the 'std' module itself gets the implicit std alias.
    if modname == 'std':
        modaliases[None] = 'std'

    ctx = s_delta.CommandContext()
    ctx.stdmode = True

    for node in edgeql.parse_block(get_std_module_text(modname)):
        command = s_ddl.delta_from_ddl(
            node,
            schema=schema,
            modaliases=modaliases,
            stdmode=True,
        )
        schema, _ = command.apply(schema, ctx)

    return schema
async def _make_stdlib(
        testmode: bool) -> Tuple[s_schema.Schema, str, Set[uuid.UUID]]:
    """Compile the standard library.

    Returns a ``(schema, sql_text, new_type_ids)`` triple: the resulting
    schema, the generated backend SQL, and the set of ids of all types
    created during compilation.
    """
    schema = s_schema.Schema()
    schema, _ = s_mod.Module.create_in_schema(schema, name='__derived__')
    schema = s_pseudo.populate_types(schema)

    # The block type (SQLBlock vs PLTopBlock) is chosen lazily based on
    # whether the first plan is a DATABASE command or regular DDL.
    current_block = None

    std_texts = []
    for modname in s_schema.STD_LIB + ('stdgraphql', ):
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text('_testmode'))

    ddl_text = '\n'.join(std_texts)
    new_types: Set[uuid.UUID] = set()

    for ddl_cmd in edgeql.parse_block(ddl_text):
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)

        if isinstance(plan, (s_db.CreateDatabase, s_db.DropDatabase)):
            # DATABASE commands must live in a plain SQL block.
            if (current_block is not None
                    and not isinstance(current_block, dbops.SQLBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.SQLBlock()
        else:
            # Track ids of types introduced by this plan.
            new_types.update(plan.new_types)
            if (current_block is not None
                    and not isinstance(current_block, dbops.PLTopBlock)):
                raise errors.QueryError(
                    'cannot mix DATABASE commands with regular DDL '
                    'commands in a single block')
            if current_block is None:
                current_block = dbops.PLTopBlock()

        plan.generate(current_block)

    assert current_block is not None
    sql_text = current_block.to_string()

    # Sanity check: every stdlib module must be flagged as builtin.
    mods = {
        mod.get_name(schema)
        for mod in schema.get_modules()
        if mod.get_builtin(schema)
    }
    if mods != s_schema.STD_MODULES:
        raise errors.SchemaError(
            f'modules {s_schema.STD_MODULES - mods} are not marked as builtin')

    return schema, sql_text, new_types
async def _make_stdlib(testmode: bool, global_ids) -> StdlibBits:
    """Compile the standard library and its reflection machinery.

    *global_ids* maps well-known global object names (superuser role,
    template database) to pre-assigned UUIDs.  Returns a fully
    populated :class:`StdlibBits`.
    """
    schema = s_schema.Schema()
    schema, _ = s_mod.Module.create_in_schema(schema, name='__derived__')

    current_block = dbops.PLTopBlock()

    std_texts = []
    for modname in s_schema.STD_LIB + ('stdgraphql', ):
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text('_testmode'))

    ddl_text = '\n'.join(std_texts)
    types: Set[uuid.UUID] = set()
    std_plans: List[sd.Command] = []

    for ddl_cmd in edgeql.parse_block(ddl_text):
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        std_plans.append(delta_command)
        types.update(plan.new_types)
        plan.generate(current_block)

    # Create the built-in globals (role and databases) with the
    # pre-assigned ids supplied by the caller.
    stdglobals = '\n'.join([
        f'''CREATE SUPERUSER ROLE {edbdef.EDGEDB_SUPERUSER} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_SUPERUSER]}'
        }};''',
        f'''CREATE DATABASE {edbdef.EDGEDB_TEMPLATE_DB} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_TEMPLATE_DB]}'
        }};''',
        f'CREATE DATABASE {edbdef.EDGEDB_SUPERUSER_DB};',
    ])

    context = sd.CommandContext(stdmode=True)

    for ddl_cmd in edgeql.parse_block(stdglobals):
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)
        schema = delta_command.apply(schema, context)

    # Build the schema-reflection structure and its backend plan.
    refldelta, classlayout, introparts = s_refl.generate_structure(schema)
    reflschema, reflplan = _process_delta(refldelta, schema)
    std_plans.append(refldelta)

    assert current_block is not None
    reflplan.generate(current_block)
    subblock = current_block.add_block()

    compiler = edbcompiler.new_compiler(
        std_schema=schema,
        reflection_schema=reflschema,
        schema_class_layout=classlayout,
        bootstrap_mode=True,
    )

    compilerctx = edbcompiler.new_compiler_context(reflschema)

    # Compile schema-storage updates for every stdlib plan; the
    # reflection delta itself is flagged as internal reflection.
    for std_plan in std_plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=std_plan,
            block=subblock,
            is_internal_reflection=std_plan is refldelta,
            stdmode=True,
        )

    sqltext = current_block.to_string()

    compilerctx = edbcompiler.new_compiler_context(
        reflschema,
        schema_reflection_mode=True,
        output_format=edbcompiler.IoFormat.JSON_ELEMENTS,
    )

    # The introspection query bits are returned in chunks
    # because it's a large UNION and we currently generate SQL
    # that is much harder for Posgres to plan as opposed to a
    # straight flat UNION.
    sql_introparts = []

    for intropart in introparts:
        introtokens = tokenizer.tokenize(intropart.encode())
        units = compiler._compile(ctx=compilerctx, tokens=introtokens)
        # Each intro part must compile to exactly one single-SQL unit.
        assert len(units) == 1 and len(units[0].sql) == 1
        sql_intropart = units[0].sql[0].decode()
        sql_introparts.append(sql_intropart)

    introsql = ' UNION ALL '.join(sql_introparts)

    return StdlibBits(
        stdschema=schema,
        reflschema=reflschema,
        sqltext=sqltext,
        types=types,
        classlayout=classlayout,
        introquery=introsql,
    )
def _compile(self, *,
             ctx: CompileContext,
             eql: bytes) -> typing.List[dbstate.QueryUnit]:
    """Compile an EdgeQL script into a list of executable QueryUnits.

    Statements are compiled one by one and folded into QueryUnits;
    consecutive statements share a unit until a single-unit transaction
    control command forces a split.  Behavior is further shaped by
    ``ctx.stmt_mode`` (SINGLE / SKIP_FIRST).
    """
    # When True it means that we're compiling for "connection.fetchall()".
    # That means that the returned QueryUnit has to have the in/out codec
    # information, correctly inferred "singleton_result" field etc.
    single_stmt_mode = ctx.stmt_mode is enums.CompileStatementMode.SINGLE
    default_cardinality = enums.ResultCardinality.NOT_APPLICABLE

    eql = eql.decode()

    statements = edgeql.parse_block(eql)
    statements_len = len(statements)

    if ctx.stmt_mode is enums.CompileStatementMode.SKIP_FIRST:
        statements = statements[1:]
        if not statements:  # pragma: no cover
            # Shouldn't ever happen as the server tracks the number
            # of statements (via the "try_compile_rollback()" method)
            # before using SKIP_FIRST.
            raise errors.ProtocolError(
                f'no statements to compile in SKIP_FIRST mode')
    elif single_stmt_mode and statements_len != 1:
        raise errors.ProtocolError(
            f'expected one statement, got {statements_len}')

    if not len(statements):  # pragma: no cover
        raise errors.ProtocolError('nothing to compile')

    units = []
    unit = None

    for stmt in statements:
        comp: dbstate.BaseQuery = self._compile_dispatch_ql(ctx, stmt)

        # A single-unit transaction control command closes the
        # current unit before being processed.
        if unit is not None:
            if (isinstance(comp, dbstate.TxControlQuery)
                    and comp.single_unit):
                units.append(unit)
                unit = None

        if unit is None:
            unit = dbstate.QueryUnit(
                dbver=ctx.state.dbver,
                sql=(),
                status=status.get_status(stmt),
                cardinality=default_cardinality)
        else:
            unit.status = status.get_status(stmt)

        if isinstance(comp, dbstate.Query):
            if single_stmt_mode:
                # Single-statement mode carries full codec metadata
                # on the unit.
                unit.sql = comp.sql
                unit.sql_hash = comp.sql_hash

                unit.out_type_data = comp.out_type_data
                unit.out_type_id = comp.out_type_id
                unit.in_type_data = comp.in_type_data
                unit.in_type_args = comp.in_type_args
                unit.in_type_id = comp.in_type_id
                unit.in_array_backend_tids = comp.in_array_backend_tids

                unit.cacheable = True

                unit.cardinality = comp.cardinality
            else:
                unit.sql += comp.sql

        elif isinstance(comp, dbstate.SimpleQuery):
            assert not single_stmt_mode
            unit.sql += comp.sql

        elif isinstance(comp, dbstate.DDLQuery):
            unit.sql += comp.sql
            unit.has_ddl = True
            unit.new_types = comp.new_types

        elif isinstance(comp, dbstate.TxControlQuery):
            unit.sql += comp.sql
            unit.cacheable = comp.cacheable

            if comp.modaliases is not None:
                unit.modaliases = comp.modaliases

            if comp.action == dbstate.TxAction.START:
                if unit.tx_id is not None:
                    raise errors.InternalServerError(
                        'already in transaction')
                unit.tx_id = ctx.state.current_tx().id
            elif comp.action == dbstate.TxAction.COMMIT:
                unit.tx_commit = True
            elif comp.action == dbstate.TxAction.ROLLBACK:
                unit.tx_rollback = True
            elif comp.action is dbstate.TxAction.ROLLBACK_TO_SAVEPOINT:
                unit.tx_savepoint_rollback = True

            if comp.single_unit:
                units.append(unit)
                unit = None

        elif isinstance(comp, dbstate.SessionStateQuery):
            unit.sql += comp.sql

            if comp.is_system_setting:
                # CONFIGURE SYSTEM is only legal as the sole statement
                # outside of an explicit transaction.
                if (not ctx.state.current_tx().is_implicit()
                        or statements_len > 1):
                    raise errors.QueryError(
                        'CONFIGURE SYSTEM cannot be executed in a '
                        'transaction block')
                unit.system_config = True
            if comp.is_backend_setting:
                unit.backend_config = True
            if comp.requires_restart:
                unit.config_requires_restart = True

            if ctx.state.current_tx().is_implicit():
                unit.modaliases = ctx.state.current_tx().get_modaliases()

            if comp.config_op is not None:
                if unit.config_ops is None:
                    unit.config_ops = []
                unit.config_ops.append(comp.config_op)

            unit.has_set = True

        else:  # pragma: no cover
            raise errors.InternalServerError('unknown compile state')

    if unit is not None:
        units.append(unit)

    if single_stmt_mode:
        if len(units) != 1:  # pragma: no cover
            raise errors.InternalServerError(
                f'expected 1 compiled unit; got {len(units)}')

    for unit in units:  # pragma: no cover
        # Sanity checks
        na_cardinality = (
            unit.cardinality is enums.ResultCardinality.NOT_APPLICABLE)
        if unit.cacheable and (unit.config_ops or unit.modaliases):
            raise errors.InternalServerError(
                f'QueryUnit {unit!r} is cacheable but has config/aliases')
        if not unit.sql:
            raise errors.InternalServerError(
                f'QueryUnit {unit!r} has no SQL commands in it')
        if not na_cardinality and (
                len(unit.sql) > 1 or
                unit.tx_commit or
                unit.tx_rollback or
                unit.tx_savepoint_rollback or
                unit.out_type_id is sertypes.NULL_TYPE_ID or
                unit.system_config or
                unit.config_ops or
                unit.modaliases or
                unit.has_set or
                unit.has_ddl or
                not unit.sql_hash):
            raise errors.InternalServerError(
                f'unit has invalid "cardinality": {unit!r}')

    return units
def parse(querystr: str) -> qlast.Expr:
    """Parse *querystr* as exactly one EdgeQL expression statement."""
    src = edgeql.Source.from_string(querystr)
    stmts = edgeql.parse_block(src)
    # The input must contain a single expression statement.
    assert len(stmts) == 1
    assert isinstance(stmts[0], qlast.Expr)
    return stmts[0]
def run_ddl(cls, schema, ddl, default_module=defines.DEFAULT_MODULE_ALIAS):
    """Parse *ddl* and apply each statement to *schema*.

    Understands the migration-block protocol (START MIGRATION /
    POPULATE MIGRATION / COMMIT MIGRATION) as well as plain DDL.
    DDL issued inside a migration block is buffered into the migration
    script instead of being applied immediately.

    Returns the schema after all statements have been applied.
    """
    statements = edgeql.parse_block(ddl)

    current_schema = schema
    target_schema = None
    # State for an open migration block (None/empty when outside one).
    migration_schema = None
    migration_target = None
    migration_script = []
    migration_plan = None

    for stmt in statements:
        if isinstance(stmt, qlast.StartMigration):
            # START MIGRATION
            if target_schema is None:
                # The std schema is loaded lazily.
                target_schema = _load_std_schema()

            migration_target = s_ddl.apply_sdl(
                stmt.target,
                base_schema=target_schema,
                current_schema=current_schema,
                testmode=True,
            )

            migration_schema = current_schema

            ddl_plan = None

        elif isinstance(stmt, qlast.PopulateMigration):
            # POPULATE MIGRATION
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected POPULATE MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            migration_diff = s_ddl.delta_schemas(
                migration_schema,
                migration_target,
            )

            # Record the auto-generated plan only if nothing has
            # been added to the script manually yet.
            if not migration_script:
                migration_plan = migration_diff

            migration_script.extend(
                s_ddl.ddlast_from_delta(
                    migration_target,
                    migration_diff,
                ),
            )
            # NOTE(review): ddl_plan is not reset in this branch, so a
            # plan left over from a previous statement would be applied
            # again below — confirm this is intended.

        elif isinstance(stmt, qlast.CommitMigration):
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected COMMIT MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            last_migration = current_schema.get_last_migration()
            if last_migration:
                last_migration_ref = s_utils.name_to_ast_ref(
                    last_migration.get_name(current_schema),
                )
            else:
                last_migration_ref = None

            create_migration = qlast.CreateMigration(
                commands=migration_script,
                auto_diff=migration_plan,
                parent=last_migration_ref,
            )

            ddl_plan = s_ddl.delta_from_ddl(
                create_migration,
                schema=migration_schema,
                modaliases={None: default_module},
                testmode=True,
            )

            # Close the migration block.
            migration_schema = None
            migration_target = None
            migration_script = []
            migration_plan = None

        elif isinstance(stmt, qlast.DDL):
            if migration_target is not None:
                # Inside a migration block: buffer, don't apply.
                migration_script.append(stmt)
                ddl_plan = None
            else:
                ddl_plan = s_ddl.delta_from_ddl(
                    stmt,
                    schema=current_schema,
                    modaliases={None: default_module},
                    testmode=True,
                )

        else:
            raise ValueError(
                f'unexpected {stmt!r} in compiler setup script')

        if ddl_plan is not None:
            context = sd.CommandContext()
            context.testmode = True
            current_schema = ddl_plan.apply(current_schema, context)

    return current_schema
async def _make_stdlib(testmode: bool, global_ids) -> StdlibBits:
    """Compile the standard library and its reflection machinery.

    *global_ids* maps well-known global object names to pre-assigned
    UUIDs.  Returns a fully populated :class:`StdlibBits`, including
    local and global introspection SQL queries.
    """
    # Three layers: std schema, user schema, global schema.
    schema = s_schema.ChainedSchema(
        s_schema.FlatSchema(),
        s_schema.FlatSchema(),
        s_schema.FlatSchema(),
    )
    schema, _ = s_mod.Module.create_in_schema(
        schema,
        name=sn.UnqualName('__derived__'),
    )

    current_block = dbops.PLTopBlock()

    std_texts = []
    for modname in s_schema.STD_SOURCES:
        std_texts.append(s_std.get_std_module_text(modname))

    if testmode:
        std_texts.append(s_std.get_std_module_text(sn.UnqualName('_testmode')))

    ddl_text = '\n'.join(std_texts)
    types: Set[uuid.UUID] = set()
    std_plans: List[sd.Command] = []

    for ddl_cmd in edgeql.parse_block(ddl_text):
        assert isinstance(ddl_cmd, qlast.DDLCommand)
        delta_command = s_ddl.delta_from_ddl(
            ddl_cmd, modaliases={}, schema=schema, stdmode=True)

        if debug.flags.delta_plan_input:
            debug.header('Delta Plan Input')
            debug.dump(delta_command)

        # Apply and adapt delta, build native delta plan, which
        # will also update the schema.
        schema, plan = _process_delta(delta_command, schema)
        std_plans.append(delta_command)
        types.update(plan.new_types)
        plan.generate(current_block)

    # Stamp the schema with its version marker.
    _, schema_version = s_std.make_schema_version(schema)
    schema, plan = _process_delta(schema_version, schema)
    std_plans.append(schema_version)
    plan.generate(current_block)

    # Create the built-in superuser role with its pre-assigned id.
    stdglobals = '\n'.join([
        f'''CREATE SUPERUSER ROLE {edbdef.EDGEDB_SUPERUSER} {{
            SET id := <uuid>'{global_ids[edbdef.EDGEDB_SUPERUSER]}'
        }};''',
    ])

    schema = await _execute_edgeql_ddl(schema, stdglobals)

    # Stamp the global schema with its version marker.
    _, global_schema_version = s_std.make_global_schema_version(schema)
    schema, plan = _process_delta(global_schema_version, schema)
    std_plans.append(global_schema_version)
    plan.generate(current_block)

    # Build the schema-reflection structure and its backend plan.
    reflection = s_refl.generate_structure(schema)
    reflschema, reflplan = _process_delta(
        reflection.intro_schema_delta, schema)

    assert current_block is not None
    reflplan.generate(current_block)
    subblock = current_block.add_block()

    compiler = edbcompiler.new_compiler(
        std_schema=schema.get_top_schema(),
        reflection_schema=reflschema.get_top_schema(),
        schema_class_layout=reflection.class_layout,  # type: ignore
    )

    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        bootstrap_mode=True,
    )

    for std_plan in std_plans:
        compiler._compile_schema_storage_in_delta(
            ctx=compilerctx,
            delta=std_plan,
            block=subblock,
        )

    # The reflection delta itself is compiled with the
    # internal-schema-mode context.
    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        bootstrap_mode=True,
        internal_schema_mode=True,
    )
    compiler._compile_schema_storage_in_delta(
        ctx=compilerctx,
        delta=reflection.intro_schema_delta,
        block=subblock,
    )

    sqltext = current_block.to_string()

    compilerctx = edbcompiler.new_compiler_context(
        user_schema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        schema_reflection_mode=True,
        output_format=edbcompiler.IoFormat.JSON_ELEMENTS,
    )

    # The introspection query bits are returned in chunks
    # because it's a large UNION and we currently generate SQL
    # that is much harder for Posgres to plan as opposed to a
    # straight flat UNION.
    sql_intro_local_parts = []
    sql_intro_global_parts = []
    for intropart in reflection.local_intro_parts:
        sql_intro_local_parts.append(
            compile_single_query(
                intropart,
                compiler=compiler,
                compilerctx=compilerctx,
            ),
        )

    for intropart in reflection.global_intro_parts:
        sql_intro_global_parts.append(
            compile_single_query(
                intropart,
                compiler=compiler,
                compilerctx=compilerctx,
            ),
        )

    local_intro_sql = ' UNION ALL '.join(sql_intro_local_parts)
    local_intro_sql = f'''
        WITH intro(c) AS ({local_intro_sql})
        SELECT json_agg(intro.c) FROM intro
    '''

    global_intro_sql = ' UNION ALL '.join(sql_intro_global_parts)
    global_intro_sql = f'''
        WITH intro(c) AS ({global_intro_sql})
        SELECT json_agg(intro.c) FROM intro
    '''

    return StdlibBits(
        stdschema=schema.get_top_schema(),
        reflschema=reflschema.get_top_schema(),
        global_schema=schema.get_global_schema(),
        sqltext=sqltext,
        types=types,
        classlayout=reflection.class_layout,
        local_intro_query=local_intro_sql,
        global_intro_query=global_intro_sql,
    )
def run_ddl(cls, schema, ddl, default_module=defines.DEFAULT_MODULE_ALIAS):
    """Parse *ddl* and apply each statement to *schema*.

    Understands the migration-block protocol (START MIGRATION /
    POPULATE MIGRATION / COMMIT MIGRATION) as well as plain DDL.
    DDL issued inside a migration block is buffered into the migration
    script instead of being applied immediately.

    Returns the schema after all statements have been applied.
    """
    statements = edgeql.parse_block(ddl)

    current_schema = schema
    target_schema = None
    # State for an open migration block (None/empty when outside one).
    migration_schema = None
    migration_target = None
    migration_script = []

    for stmt in statements:
        if isinstance(stmt, qlast.StartMigration):
            # START MIGRATION
            if target_schema is None:
                # The std schema is loaded lazily.
                target_schema = _load_std_schema()

            migration_target = s_ddl.apply_sdl(
                stmt.target,
                base_schema=target_schema,
                current_schema=current_schema,
                testmode=True,
            )

            migration_schema = current_schema

            ddl_plan = None

        elif isinstance(stmt, qlast.PopulateMigration):
            # POPULATE MIGRATION
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected POPULATE MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            migration_diff = s_ddl.delta_schemas(
                migration_schema,
                migration_target,
            )

            if debug.flags.delta_plan:
                debug.header('Populate Migration Diff')
                debug.dump(migration_diff, schema=schema)

            new_ddl = s_ddl.ddlast_from_delta(
                migration_schema,
                migration_target,
                migration_diff,
            )

            migration_script.extend(new_ddl)

            if debug.flags.delta_plan:
                debug.header('Populate Migration DDL AST')
                text = []
                for cmd in new_ddl:
                    debug.dump(cmd)
                    text.append(edgeql.generate_source(cmd, pretty=True))
                debug.header('Populate Migration DDL Text')
                debug.dump_code(';\n'.join(text) + ';')
            # NOTE(review): ddl_plan is not reset in this branch, so a
            # plan left over from a previous statement would be applied
            # again below — confirm this is intended.

        elif isinstance(stmt, qlast.CommitMigration):
            if migration_target is None:
                raise errors.QueryError(
                    'unexpected COMMIT MIGRATION:'
                    ' not currently in a migration block',
                    context=stmt.context,
                )

            last_migration = current_schema.get_last_migration()
            if last_migration:
                last_migration_ref = s_utils.name_to_ast_ref(
                    last_migration.get_name(current_schema),
                )
            else:
                last_migration_ref = None

            create_migration = qlast.CreateMigration(
                body=qlast.MigrationBody(commands=tuple(migration_script)),
                parent=last_migration_ref,
            )

            ddl_plan = s_ddl.delta_from_ddl(
                create_migration,
                schema=migration_schema,
                modaliases={None: default_module},
                testmode=True,
            )

            if debug.flags.delta_plan:
                debug.header('Delta Plan')
                debug.dump(ddl_plan, schema=schema)

            # Close the migration block.
            migration_schema = None
            migration_target = None
            migration_script = []

        elif isinstance(stmt, qlast.DDL):
            if migration_target is not None:
                # Inside a migration block: buffer, don't apply.
                migration_script.append(stmt)
                ddl_plan = None
            else:
                ddl_plan = s_ddl.delta_from_ddl(
                    stmt,
                    schema=current_schema,
                    modaliases={None: default_module},
                    testmode=True,
                )

                if debug.flags.delta_plan:
                    debug.header('Delta Plan')
                    debug.dump(ddl_plan, schema=schema)

        else:
            raise ValueError(
                f'unexpected {stmt!r} in compiler setup script')

        if ddl_plan is not None:
            context = sd.CommandContext()
            context.testmode = True
            current_schema = ddl_plan.apply(current_schema, context)

    return current_schema